From 70f7398d4ec6cc17a104ddfc9f5b71ff813a4efd Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Mon, 8 Jan 2024 10:31:49 +0100 Subject: [PATCH 01/33] fix IrodsDataRequestListView webdav urls (#1860), refactor --- CHANGELOG.rst | 10 ++++++++++ docs_manual/source/conf.py | 2 +- docs_manual/source/sodar_release_notes.rst | 8 ++++++++ samplesheets/tests/test_views.py | 17 ++++++++++------ samplesheets/views.py | 23 ++++++++++------------ samplesheets/vueapp/package-lock.json | 4 ++-- samplesheets/vueapp/package.json | 2 +- 7 files changed, 43 insertions(+), 23 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index fd0d1815..5fa50d74 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,16 @@ Changelog for the SODAR project. Loosely follows the `Keep a Changelog `_ guidelines. +Unreleased +========== + +Fixed +----- + +- **Samplesheets** + - Invalid WebDAV URLs generated in ``IrodsDataRequestListView`` (#1860) + + v0.14.1 (2023-12-12) ==================== diff --git a/docs_manual/source/conf.py b/docs_manual/source/conf.py index 4a4a2d27..8360d084 100644 --- a/docs_manual/source/conf.py +++ b/docs_manual/source/conf.py @@ -26,7 +26,7 @@ author = 'BIH Core Unit Bioinformatics' # The full version, including alpha/beta/rc tags -release = '0.14.1' +release = '0.14.2-WIP' # -- General configuration --------------------------------------------------- diff --git a/docs_manual/source/sodar_release_notes.rst b/docs_manual/source/sodar_release_notes.rst index 14a5af7b..f657ad54 100644 --- a/docs_manual/source/sodar_release_notes.rst +++ b/docs_manual/source/sodar_release_notes.rst @@ -8,6 +8,14 @@ list of changes in current and previous releases, see the :ref:`full changelog`. +v0.14.2 (WIP) +============ + +Release for minor updates, maintenance and bug fixes. 
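The ``IrodsDataRequestListView`` WebDAV fix in this patch replaces manual string formatting with ``urljoin()``. A minimal sketch of the difference, assuming ``IRODS_WEBDAV_URL`` is ``https://127.0.0.1`` as in the test settings and using a made-up iRODS path:

.. code-block:: python

    from urllib.parse import urljoin

    webdav_url = 'https://127.0.0.1'             # example IRODS_WEBDAV_URL value
    path = '/sodarZone/projects/aa/bb/file.txt'  # hypothetical iRODS object path

    # Old approach: '{}/{}'.format() inserts a slash before an already absolute path
    old_url = '{}/{}'.format(webdav_url, path)
    # -> 'https://127.0.0.1//sodarZone/projects/aa/bb/file.txt' (double slash)

    # New approach: urljoin() resolves the absolute path against the host
    new_url = urljoin(webdav_url, path)
    # -> 'https://127.0.0.1/sodarZone/projects/aa/bb/file.txt'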
+ +- Fix iRODS data request view issues + + v0.14.1 (2023-12-12) ==================== diff --git a/samplesheets/tests/test_views.py b/samplesheets/tests/test_views.py index 989c398d..e463edeb 100644 --- a/samplesheets/tests/test_views.py +++ b/samplesheets/tests/test_views.py @@ -1552,10 +1552,10 @@ class TestIrodsDataRequestListView( ): """Tests for IrodsDataRequestListView""" - def test_list(self): - """Test GET request for listing delete requests""" + def test_get(self): + """Test IrodsDataRequestListView GET""" self.assertEqual(IrodsDataRequest.objects.count(), 0) - request = self.make_irods_request( + irods_request = self.make_irods_request( project=self.project, action=IRODS_REQUEST_ACTION_DELETE, path=IRODS_FILE_PATH, @@ -1571,9 +1571,14 @@ def test_list(self): ) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.context['object_list']), 1) - self.assertEqual(response.context['object_list'][0], request) + context_obj = response.context['object_list'][0] + self.assertEqual(context_obj, irods_request) + self.assertEqual( + context_obj.webdav_url, 'https://127.0.0.1' + IRODS_FILE_PATH + ) # Ensure no extra slash is between host and iRODS path + self.assertEqual(context_obj.is_collection, False) - def test_list_as_contributor_by_superuser(self): + def test_get_as_contributor_by_superuser(self): """Test GET as contibutor with request created by superuser""" self.make_irods_request( project=self.project, @@ -1592,7 +1597,7 @@ def test_list_as_contributor_by_superuser(self): self.assertEqual(response.status_code, 200) self.assertEqual(len(response.context['object_list']), 0) - def test_list_as_contributor2_by_contributor(self): + def test_get_as_contributor2_by_contributor(self): """Test GET as contributor2 with request created by contributor""" user_contributor2 = self.make_user('user_contributor2') self.make_assignment( diff --git a/samplesheets/views.py b/samplesheets/views.py index 55a03e46..953d182c 100644 --- a/samplesheets/views.py +++ b/samplesheets/views.py @@ -12,6 +12,7 @@ from cubi_isa_templates import _TEMPLATES as ISA_TEMPLATES from irods.exception import CollectionDoesNotExist from packaging import version +from urllib.parse import urljoin from django.conf import settings from django.contrib import messages @@ -2864,13 +2865,21 @@ class IrodsDataRequestListView( template_name = 'samplesheets/irods_requests.html' paginate_by = settings.SHEETS_IRODS_REQUEST_PAGINATION + @classmethod + def get_item_extra_data(cls, irods_session, item): + if settings.IRODS_WEBDAV_ENABLED: + item.webdav_url = urljoin(settings.IRODS_WEBDAV_URL, item.path) + else: + item.webdav_url = None + item.is_collection = irods_session.collections.exists(item.path) + def get_context_data(self, *args, **kwargs): context_data = super().get_context_data(*args, **kwargs) irods = get_backend_api('omics_irods') with irods.get_session() as irods_session: if settings.IRODS_WEBDAV_ENABLED: for item in context_data['object_list']: - self.get_extra_item_data(irods_session, item) + self.get_item_extra_data(irods_session, item) assign = RoleAssignment.objects.filter( project=self.get_project(), user=self.request.user, @@ -2899,18 +2908,6 @@ def get_queryset(self): # For regular users, dispaly their own requests regardless of status return queryset.filter(user=self.request.user) - def build_webdav_url(self, item): - return '{}/{}'.format(settings.IRODS_WEBDAV_URL, item.path) - - def get_extra_item_data(self, irods_session, item): - # Add webdav URL to the item - if settings.IRODS_WEBDAV_ENABLED: - 
item.webdav_url = self.build_webdav_url(item) - else: - item.webdav_url = None - # Check if the item is a collection - item.is_collection = irods_session.collections.exists(item.path) - def get(self, request, *args, **kwargs): irods_backend = get_backend_api('omics_irods') if not irods_backend: diff --git a/samplesheets/vueapp/package-lock.json b/samplesheets/vueapp/package-lock.json index ee92aecd..90362481 100644 --- a/samplesheets/vueapp/package-lock.json +++ b/samplesheets/vueapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "samplesheets", - "version": "0.14.1", + "version": "0.14.2-WIP", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "samplesheets", - "version": "0.14.1", + "version": "0.14.2-WIP", "dependencies": { "bootstrap-vue": "^2.22.0", "core-js": "^3.23.5", diff --git a/samplesheets/vueapp/package.json b/samplesheets/vueapp/package.json index 70d4b455..ec7403bc 100644 --- a/samplesheets/vueapp/package.json +++ b/samplesheets/vueapp/package.json @@ -1,6 +1,6 @@ { "name": "samplesheets", - "version": "0.14.1", + "version": "0.14.2-WIP", "private": true, "scripts": { "serve": "vue-cli-service serve", From 3cb7bb640c0891639b6388cecee4e5e52acab882 Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Mon, 8 Jan 2024 12:17:15 +0100 Subject: [PATCH 02/33] fix IrodsDataRequest superuser update (#1863, #1864), refactor --- CHANGELOG.rst | 2 + .../samplesheets/irods_requests.html | 2 +- samplesheets/tests/test_views_api_taskflow.py | 14 ++ samplesheets/tests/test_views_taskflow.py | 48 +++--- samplesheets/views.py | 146 +++++++++--------- 5 files changed, 119 insertions(+), 93 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 5fa50d74..2b01279e 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -13,6 +13,8 @@ Fixed - **Samplesheets** - Invalid WebDAV URLs generated in ``IrodsDataRequestListView`` (#1860) + - Superuser not allowed to edit iRODS request from other users in UI (#1863) + - ``IrodsDataRequest`` user changed on object update (#1864) v0.14.1 (2023-12-12) diff --git a/samplesheets/templates/samplesheets/irods_requests.html b/samplesheets/templates/samplesheets/irods_requests.html index 779e186f..f8741bec 100644 --- a/samplesheets/templates/samplesheets/irods_requests.html +++ b/samplesheets/templates/samplesheets/irods_requests.html @@ -181,7 +181,7 @@
iRODS Delete Requests
Reject Request {% endif %} - {% if irods_request.user == request.user %} + {% if request.user.is_superuser or irods_request.user == request.user %} {% if irods_request.status == 'ACTIVE' or irods_request.status == 'FAILED' %} diff --git a/samplesheets/tests/test_views_api_taskflow.py b/samplesheets/tests/test_views_api_taskflow.py index 80c19ff4..da68980a 100644 --- a/samplesheets/tests/test_views_api_taskflow.py +++ b/samplesheets/tests/test_views_api_taskflow.py @@ -928,6 +928,20 @@ def test_patch(self): self.assertEqual(self.request.path, self.obj_path) self._assert_tl_count(1) + def test_patch_superuser(self): + """Test PATCH as superuser""" + self._assert_tl_count(0) + update_data = {'description': IRODS_REQUEST_DESC_UPDATED} + response = self.request_knox( + self.url, 'PATCH', data=update_data, token=self.get_token(self.user) + ) + self.assertEqual(response.status_code, 200) + self.request.refresh_from_db() + self.assertEqual(self.request.description, IRODS_REQUEST_DESC_UPDATED) + self.assertEqual(self.request.path, self.obj_path) + self.assertEqual(self.request.user, self.user_contributor) + self._assert_tl_count(1) + # NOTE: For TestIrodsDataRequestDestroyAPIView, see test_views_api diff --git a/samplesheets/tests/test_views_taskflow.py b/samplesheets/tests/test_views_taskflow.py index c8cfc757..260798ec 100644 --- a/samplesheets/tests/test_views_taskflow.py +++ b/samplesheets/tests/test_views_taskflow.py @@ -1323,9 +1323,13 @@ def setUp(self): user=self.user_contributor, description=IRODS_REQUEST_DESC, ) + self.url = reverse( + 'samplesheets:irods_request_update', + kwargs={'irodsdatarequest': self.request.sodar_uuid}, + ) - def test_update(self): - """Test POST request for updating a delete request""" + def test_post(self): + """Test IrodsDataRequestUpdateView POST""" self.assertEqual(IrodsDataRequest.objects.count(), 1) self._assert_tl_count(EVENT_UPDATE, 0) @@ -1334,13 +1338,7 @@ def test_update(self): 'description': IRODS_REQUEST_DESC_UPDATE, } with self.login(self.user_contributor): - response = self.client.post( - reverse( - 'samplesheets:irods_request_update', - kwargs={'irodsdatarequest': self.request.sodar_uuid}, - ), - update_data, - ) + response = self.client.post(self.url, update_data) self.assertRedirects( response, reverse( @@ -1361,8 +1359,28 @@ def test_update(self): self.assertEqual(self.request.description, IRODS_REQUEST_DESC_UPDATE) self._assert_tl_count(EVENT_UPDATE, 1) - def test_post_update_invalid_form_data(self): - """Test updating a delete request with invalid form data""" + def test_post_superuser(self): + """Test POST as superuser""" + self.assertEqual(IrodsDataRequest.objects.count(), 1) + self._assert_tl_count(EVENT_UPDATE, 0) + + update_data = { + 'path': self.obj_path, + 'description': IRODS_REQUEST_DESC_UPDATE, + } + with self.login(self.user): + self.client.post(self.url, update_data) + + self.assertEqual(IrodsDataRequest.objects.count(), 1) + self.request.refresh_from_db() + self.assertEqual(self.request.path, self.obj_path) + self.assertEqual(self.request.description, IRODS_REQUEST_DESC_UPDATE) + # Assert user is not updated when superuser updates the request + self.assertEqual(self.request.user, self.user_contributor) + self._assert_tl_count(EVENT_UPDATE, 1) + + def test_post_invalid_form_data(self): + """Test POST with invalid form data""" self.assertEqual(IrodsDataRequest.objects.count(), 1) self._assert_tl_count(EVENT_UPDATE, 0) @@ -1371,13 +1389,7 @@ def test_post_update_invalid_form_data(self): 'description': IRODS_REQUEST_DESC_UPDATE, } 
with self.login(self.user_contributor): - response = self.client.post( - reverse( - 'samplesheets:irods_request_update', - kwargs={'irodsdatarequest': self.request.sodar_uuid}, - ), - update_data, - ) + response = self.client.post(self.url, update_data) self.assertEqual( response.context['form'].errors['path'][0], diff --git a/samplesheets/views.py b/samplesheets/views.py index 953d182c..92b821bb 100644 --- a/samplesheets/views.py +++ b/samplesheets/views.py @@ -2381,6 +2381,76 @@ def delete(self, request, *args, **kwargs): return super().delete(request, *args, **kwargs) +class IrodsDataRequestListView( + LoginRequiredMixin, + LoggedInPermissionMixin, + ProjectPermissionMixin, + InvestigationContextMixin, + ListView, +): + """View for listing iRODS data requests""" + + model = IrodsDataRequest + permission_required = 'samplesheets.edit_sheet' + template_name = 'samplesheets/irods_requests.html' + paginate_by = settings.SHEETS_IRODS_REQUEST_PAGINATION + + @classmethod + def get_item_extra_data(cls, irods_session, item): + if settings.IRODS_WEBDAV_ENABLED: + item.webdav_url = urljoin(settings.IRODS_WEBDAV_URL, item.path) + else: + item.webdav_url = None + item.is_collection = irods_session.collections.exists(item.path) + + def get_context_data(self, *args, **kwargs): + context_data = super().get_context_data(*args, **kwargs) + irods = get_backend_api('omics_irods') + with irods.get_session() as irods_session: + if settings.IRODS_WEBDAV_ENABLED: + for item in context_data['object_list']: + self.get_item_extra_data(irods_session, item) + assign = RoleAssignment.objects.filter( + project=self.get_project(), + user=self.request.user, + role__name=SODAR_CONSTANTS['PROJECT_ROLE_CONTRIBUTOR'], + ) + context_data['is_contributor'] = bool(assign) + context_data['irods_webdav_enabled'] = settings.IRODS_WEBDAV_ENABLED + return context_data + + def get_queryset(self): + project = self.get_project() + queryset = self.model.objects.filter(project=project) + # For superusers, owners and delegates, + # display active/failed requests from all users + if ( + self.request.user.is_superuser + or project.is_delegate(self.request.user) + or project.is_owner(self.request.user) + ): + return queryset.filter( + status__in=[ + IRODS_REQUEST_STATUS_ACTIVE, + IRODS_REQUEST_STATUS_FAILED, + ] + ) + # For regular users, dispaly their own requests regardless of status + return queryset.filter(user=self.request.user) + + def get(self, request, *args, **kwargs): + irods_backend = get_backend_api('omics_irods') + if not irods_backend: + messages.error(request, 'iRODS backend not enabled') + return redirect( + reverse( + 'samplesheets:project_sheets', + kwargs={'project': self.get_project().sodar_uuid}, + ) + ) + return super().get(request, *args, **kwargs) + + class IrodsDataRequestCreateView( LoginRequiredMixin, LoggedInPermissionMixin, @@ -2405,6 +2475,7 @@ def form_valid(self, form): project = self.get_project() # Create database object obj = form.save(commit=False) + # TODO: These should happen in the form instead (see #1865) obj.user = self.request.user obj.project = project obj.save() @@ -2456,10 +2527,7 @@ def get_form_kwargs(self): return kwargs def form_valid(self, form): - obj = form.save(commit=False) - obj.user = self.request.user - obj.project = self.get_project() - obj.save() + obj = form.save() self.add_tl_update(obj) messages.success( self.request, @@ -2851,76 +2919,6 @@ def post(self, request, *args, **kwargs): ) -class IrodsDataRequestListView( - LoginRequiredMixin, - LoggedInPermissionMixin, - 
ProjectPermissionMixin, - InvestigationContextMixin, - ListView, -): - """View for listing iRODS data requests""" - - model = IrodsDataRequest - permission_required = 'samplesheets.edit_sheet' - template_name = 'samplesheets/irods_requests.html' - paginate_by = settings.SHEETS_IRODS_REQUEST_PAGINATION - - @classmethod - def get_item_extra_data(cls, irods_session, item): - if settings.IRODS_WEBDAV_ENABLED: - item.webdav_url = urljoin(settings.IRODS_WEBDAV_URL, item.path) - else: - item.webdav_url = None - item.is_collection = irods_session.collections.exists(item.path) - - def get_context_data(self, *args, **kwargs): - context_data = super().get_context_data(*args, **kwargs) - irods = get_backend_api('omics_irods') - with irods.get_session() as irods_session: - if settings.IRODS_WEBDAV_ENABLED: - for item in context_data['object_list']: - self.get_item_extra_data(irods_session, item) - assign = RoleAssignment.objects.filter( - project=self.get_project(), - user=self.request.user, - role__name=SODAR_CONSTANTS['PROJECT_ROLE_CONTRIBUTOR'], - ) - context_data['is_contributor'] = bool(assign) - context_data['irods_webdav_enabled'] = settings.IRODS_WEBDAV_ENABLED - return context_data - - def get_queryset(self): - project = self.get_project() - queryset = self.model.objects.filter(project=project) - # For superusers, owners and delegates, - # display active/failed requests from all users - if ( - self.request.user.is_superuser - or project.is_delegate(self.request.user) - or project.is_owner(self.request.user) - ): - return queryset.filter( - status__in=[ - IRODS_REQUEST_STATUS_ACTIVE, - IRODS_REQUEST_STATUS_FAILED, - ] - ) - # For regular users, dispaly their own requests regardless of status - return queryset.filter(user=self.request.user) - - def get(self, request, *args, **kwargs): - irods_backend = get_backend_api('omics_irods') - if not irods_backend: - messages.error(request, 'iRODS backend not enabled') - return redirect( - reverse( - 'samplesheets:project_sheets', - kwargs={'project': self.get_project().sodar_uuid}, - ) - ) - return super().get(request, *args, **kwargs) - - class SheetRemoteSyncView( LoginRequiredMixin, LoggedInPermissionMixin, From 9cdefa9f174bd58063944cfa4734208e06c644a5 Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Fri, 12 Jan 2024 11:05:11 +0100 Subject: [PATCH 03/33] fix legacy IrodsDataRequest modifying, cosmetic updates (#1858, #1862) --- CHANGELOG.rst | 8 ++++++++ samplesheets/models.py | 3 ++- samplesheets/tests/test_models.py | 9 +++++++-- samplesheets/views.py | 29 +++++++++++++++++------------ samplesheets/views_ajax.py | 2 +- 5 files changed, 35 insertions(+), 16 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 2b01279e..e79df3f4 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -8,6 +8,13 @@ Changelog for the SODAR project. 
Loosely follows the Unreleased ========== +Changed +------- + +- **Samplesheets** + - Improve Django messages for ``IrodsDataRequest`` exceptions (#1858) + - Change ``IrodsDataRequest`` description if created in Ajax view (#1862) + Fixed ----- @@ -15,6 +22,7 @@ Fixed - Invalid WebDAV URLs generated in ``IrodsDataRequestListView`` (#1860) - Superuser not allowed to edit iRODS request from other users in UI (#1863) - ``IrodsDataRequest`` user changed on object update (#1864) + - ``IrodsDataRequest._validate_action()`` failing with ``delete`` action (#1858) v0.14.1 (2023-12-12) diff --git a/samplesheets/models.py b/samplesheets/models.py index 96d0c5d0..5d939ffc 100644 --- a/samplesheets/models.py +++ b/samplesheets/models.py @@ -1381,7 +1381,8 @@ def save(self, *args, **kwargs): def _validate_action(self): """Validate the action field""" - if self.action != IRODS_REQUEST_ACTION_DELETE: + # Compare against uppercase string to support legacy requests + if self.action.upper() != IRODS_REQUEST_ACTION_DELETE: raise ValidationError( 'This model currently only supports the action "{}"'.format( IRODS_REQUEST_ACTION_DELETE diff --git a/samplesheets/tests/test_models.py b/samplesheets/tests/test_models.py index 249544e7..22629a13 100644 --- a/samplesheets/tests/test_models.py +++ b/samplesheets/tests/test_models.py @@ -1602,12 +1602,17 @@ def test__repr__(self): ) self.assertEqual(repr(self.request), expected) - def test_validate_action(self): - """Test _validate_action()""" + def test_validate_action_move(self): + """Test _validate_action() with MOVE status""" with self.assertRaises(ValidationError): self.request.action = 'MOVE' self.request.save() + def test_validate_action_legacy(self): + """Test _validate_action() with legacy lowercase status""" + self.request.action = 'delete' + self.request.save() # No exception + def test_validate_status(self): """Test _validate_status()""" with self.assertRaises(ValidationError): diff --git a/samplesheets/views.py b/samplesheets/views.py index 92b821bb..b3550dbf 100644 --- a/samplesheets/views.py +++ b/samplesheets/views.py @@ -1161,6 +1161,21 @@ def get_irods_request_objects(self): return IrodsDataRequest.objects.none() return IrodsDataRequest.objects.filter(sodar_uuid__in=request_ids) + def add_error_message(self, obj, ex): + """ + Add Django error message for iRODS data request exception. 
+ + :param obj: IrodsDataRequest object + :param ex: Exception + """ + ex_msg = '; '.join(ex) if isinstance(ex, list) else str(ex) + messages.error( + self.request, + 'Accepting iRODS data request "{}" failed: {}'.format( + obj.get_display_name(), ex_msg + ), + ) + class SheetRemoteSyncAPI(SheetImportMixin): """ @@ -2667,12 +2682,7 @@ def post(self, request, *args, **kwargs): ), ) except Exception as ex: - messages.error( - self.request, - 'Accepting iRODS data request "{}" failed: {}'.format( - obj.get_display_name(), ex - ), - ) + self.add_error_message(obj, ex) return redirect( reverse( 'samplesheets:irods_requests', @@ -2766,12 +2776,7 @@ def post(self, request, *args, **kwargs): ), ) except Exception as ex: - messages.error( - self.request, - 'Accepting iRODS data request "{}" failed: {}'.format( - obj.get_display_name(), ex - ), - ) + self.add_error_message(obj, ex) return redirect( reverse( diff --git a/samplesheets/views_ajax.py b/samplesheets/views_ajax.py index 19b2b996..29e806d2 100644 --- a/samplesheets/views_ajax.py +++ b/samplesheets/views_ajax.py @@ -1869,7 +1869,7 @@ def post(self, request, *args, **kwargs): path=path, user=request.user, project=project, - description='Request created via Ajax API', + description='Request created in iRODS file list', ) # Create timeline event From 828d865f1e9b31030088945e12504ee91aa54b61 Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Thu, 18 Jan 2024 16:29:04 +0100 Subject: [PATCH 04/33] fix ldap tls env var retrieval (#1853) --- CHANGELOG.rst | 8 ++++++++ config/settings/base.py | 10 ++-------- docs_manual/source/sodar_release_notes.rst | 3 ++- 3 files changed, 12 insertions(+), 9 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index e79df3f4..c671fb1b 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -18,12 +18,20 @@ Changed Fixed ----- +- **General** + - Invalid env var retrieval for ``AUTH_LDAP*_START_TLS`` (#1853) - **Samplesheets** - Invalid WebDAV URLs generated in ``IrodsDataRequestListView`` (#1860) - Superuser not allowed to edit iRODS request from other users in UI (#1863) - ``IrodsDataRequest`` user changed on object update (#1864) - ``IrodsDataRequest._validate_action()`` failing with ``delete`` action (#1858) +Removed +------- + +- **General** + - LDAP settings ``OPT_X_TLS_REQUIRE_CERT`` workaround (#1853) + v0.14.1 (2023-12-12) ==================== diff --git a/config/settings/base.py b/config/settings/base.py index e35435ae..0c7bc439 100644 --- a/config/settings/base.py +++ b/config/settings/base.py @@ -365,14 +365,12 @@ 'last_name': 'sn', 'email': 'mail', } - # Temporarily disable cert checking (see issue #1853) - ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER) # Primary LDAP server AUTH_LDAP_SERVER_URI = env.str('AUTH_LDAP_SERVER_URI', None) AUTH_LDAP_BIND_DN = env.str('AUTH_LDAP_BIND_DN', None) AUTH_LDAP_BIND_PASSWORD = env.str('AUTH_LDAP_BIND_PASSWORD', None) - AUTH_LDAP_START_TLS = env.str('AUTH_LDAP_START_TLS', False) + AUTH_LDAP_START_TLS = env.bool('AUTH_LDAP_START_TLS', False) AUTH_LDAP_CA_CERT_FILE = env.str('AUTH_LDAP_CA_CERT_FILE', None) AUTH_LDAP_CONNECTION_OPTIONS = {**LDAP_DEFAULT_CONN_OPTIONS} if AUTH_LDAP_CA_CERT_FILE: @@ -383,7 +381,6 @@ AUTH_LDAP_USER_FILTER = env.str( 'AUTH_LDAP_USER_FILTER', '(sAMAccountName=%(user)s)' ) - AUTH_LDAP_USER_SEARCH = LDAPSearch( env.str('AUTH_LDAP_USER_SEARCH_BASE', None), ldap.SCOPE_SUBTREE, @@ -394,7 +391,6 @@ AUTH_LDAP_DOMAIN_PRINTABLE = env.str( 'AUTH_LDAP_DOMAIN_PRINTABLE', AUTH_LDAP_USERNAME_DOMAIN ) - AUTHENTICATION_BACKENDS = tuple( 
itertools.chain( ('projectroles.auth_backends.PrimaryLDAPBackend',), @@ -407,7 +403,7 @@ AUTH_LDAP2_SERVER_URI = env.str('AUTH_LDAP2_SERVER_URI', None) AUTH_LDAP2_BIND_DN = env.str('AUTH_LDAP2_BIND_DN', None) AUTH_LDAP2_BIND_PASSWORD = env.str('AUTH_LDAP2_BIND_PASSWORD', None) - AUTH_LDAP2_START_TLS = env.str('AUTH_LDAP2_START_TLS', False) + AUTH_LDAP2_START_TLS = env.bool('AUTH_LDAP2_START_TLS', False) AUTH_LDAP2_CA_CERT_FILE = env.str('AUTH_LDAP2_CA_CERT_FILE', None) AUTH_LDAP2_CONNECTION_OPTIONS = {**LDAP_DEFAULT_CONN_OPTIONS} if AUTH_LDAP2_CA_CERT_FILE: @@ -418,7 +414,6 @@ AUTH_LDAP2_USER_FILTER = env.str( 'AUTH_LDAP2_USER_FILTER', '(sAMAccountName=%(user)s)' ) - AUTH_LDAP2_USER_SEARCH = LDAPSearch( env.str('AUTH_LDAP2_USER_SEARCH_BASE', None), ldap.SCOPE_SUBTREE, @@ -429,7 +424,6 @@ AUTH_LDAP2_DOMAIN_PRINTABLE = env.str( 'AUTH_LDAP2_DOMAIN_PRINTABLE', AUTH_LDAP2_USERNAME_DOMAIN ) - AUTHENTICATION_BACKENDS = tuple( itertools.chain( ('projectroles.auth_backends.SecondaryLDAPBackend',), diff --git a/docs_manual/source/sodar_release_notes.rst b/docs_manual/source/sodar_release_notes.rst index f657ad54..3f191bf1 100644 --- a/docs_manual/source/sodar_release_notes.rst +++ b/docs_manual/source/sodar_release_notes.rst @@ -13,7 +13,8 @@ v0.14.2 (WIP) Release for minor updates, maintenance and bug fixes. -- Fix iRODS data request view issues +- Fix iRODS data request issues +- Fix LDAP TLS settings v0.14.1 (2023-12-12) From ada1d860c1561afc84ade2ab153ba2a3dbe35818 Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Fri, 19 Jan 2024 09:48:13 +0100 Subject: [PATCH 05/33] upgrade follow-redirects (#1854) --- CHANGELOG.rst | 2 ++ samplesheets/vueapp/package-lock.json | 12 ++++++------ 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index c671fb1b..164815ab 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -11,6 +11,8 @@ Unreleased Changed ------- +- **General** + - Upgrade critical Vue app dependencies (#1854) - **Samplesheets** - Improve Django messages for ``IrodsDataRequest`` exceptions (#1858) - Change ``IrodsDataRequest`` description if created in Ajax view (#1862) diff --git a/samplesheets/vueapp/package-lock.json b/samplesheets/vueapp/package-lock.json index 90362481..ac9dc64f 100644 --- a/samplesheets/vueapp/package-lock.json +++ b/samplesheets/vueapp/package-lock.json @@ -8125,9 +8125,9 @@ "dev": true }, "node_modules/follow-redirects": { - "version": "1.15.3", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz", - "integrity": "sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==", + "version": "1.15.5", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz", + "integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw==", "dev": true, "funding": [ { @@ -23401,9 +23401,9 @@ "dev": true }, "follow-redirects": { - "version": "1.15.3", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz", - "integrity": "sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==", + "version": "1.15.5", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz", + "integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw==", "dev": true }, "for-each": { From e68871971fd7fe2ee1881e324b7b9024880858c8 Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: 
Fri, 19 Jan 2024 16:59:43 +0100 Subject: [PATCH 06/33] fix zone stats badge display for superusers (#1866) --- CHANGELOG.rst | 2 ++ landingzones/static/landingzones/js/landingzones.js | 6 +++--- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 164815ab..c690a080 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -22,6 +22,8 @@ Fixed - **General** - Invalid env var retrieval for ``AUTH_LDAP*_START_TLS`` (#1853) +- **Landingzones** + - Stats badge displayed to superusers for ``DELETED`` zones (#1866) - **Samplesheets** - Invalid WebDAV URLs generated in ``IrodsDataRequestListView`` (#1860) - Superuser not allowed to edit iRODS request from other users in UI (#1863) diff --git a/landingzones/static/landingzones/js/landingzones.js b/landingzones/static/landingzones/js/landingzones.js index 7cb82c82..299329f2 100644 --- a/landingzones/static/landingzones/js/landingzones.js +++ b/landingzones/static/landingzones/js/landingzones.js @@ -62,7 +62,6 @@ var updateZoneStatus = function() { } if (['CREATING', 'NOT CREATED', 'MOVED', 'DELETED'].includes(zoneStatus)) { zoneTr.find('p#sodar-lz-zone-stats-container-' + zoneUuid).hide(); - if (zoneStatus === 'MOVED') { var statusMovedSpan = zoneTr.find( 'span#sodar-lz-zone-status-moved-' + zoneUuid @@ -78,7 +77,6 @@ var updateZoneStatus = function() { } // Button modification - // if (zoneStatus !== 'ACTIVE' && zoneStatus !== 'FAILED' && isSuperuser) {} if (zoneStatus !== 'ACTIVE' && zoneStatus !== 'FAILED' && !isSuperuser) { zoneTr.find('td.sodar-lz-zone-title').addClass('text-muted'); zoneTr.find('td.sodar-lz-zone-assay').addClass('text-muted'); @@ -96,7 +94,9 @@ var updateZoneStatus = function() { zoneTr.find('td.sodar-lz-zone-title').removeClass('text-muted'); zoneTr.find('td.sodar-lz-zone-assay').removeClass('text-muted'); zoneTr.find('td.sodar-lz-zone-status-info').removeClass('text-muted'); - zoneTr.find('p#sodar-lz-zone-stats-container-' + zoneUuid).show(); + if (zoneStatus !== 'DELETED') { + zoneTr.find('p#sodar-lz-zone-stats-container-' + zoneUuid).show(); + } zoneTr.find('.btn').each(function() { if ($(this).is('button')) { $(this).removeAttr('disabled'); From 5483cd4e073ad59c99da18bec81e9bf347a66507 Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Fri, 26 Jan 2024 10:21:24 +0100 Subject: [PATCH 07/33] update installation docs (#1871) --- CHANGELOG.rst | 1 + docs_manual/source/admin_install.rst | 53 ++++++++++++++++------ docs_manual/source/dev_install.rst | 6 +-- docs_manual/source/sodar_release_notes.rst | 3 +- 4 files changed, 45 insertions(+), 18 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index c690a080..5af1fa6d 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -13,6 +13,7 @@ Changed - **General** - Upgrade critical Vue app dependencies (#1854) + - Update installation documentation (#1871) - **Samplesheets** - Improve Django messages for ``IrodsDataRequest`` exceptions (#1858) - Change ``IrodsDataRequest`` description if created in Ajax view (#1862) diff --git a/docs_manual/source/admin_install.rst b/docs_manual/source/admin_install.rst index f65a98a3..d92ce025 100644 --- a/docs_manual/source/admin_install.rst +++ b/docs_manual/source/admin_install.rst @@ -31,13 +31,19 @@ Docker Compose network if e.g. you already have a separate iRODS server running. - ``traefik``: Reverse proxy for TLS/SSL routing. - ``sssd``: System Security Service Daemon for LDAP/AD authentication. +.. note:: + + Currently the sodar-docker-compose environment only supports iRODS v4.2. 
+ Support for v4.3 is being worked on. iRODS v4.3 will be the default + supported version from SODAR v1.0 onwards. + Quickstart Guide ================ For a guide on how to try out and evaluate SODAR on your Linux workstation, see the `README file `_ of -the SODAR Docker Compose repository. +the sodar-docker-compose repository. Installation Guide @@ -70,7 +76,7 @@ requirements. 1. Clone the Repository ----------------------- -Clone the ``sodar-docker-compose`` repository as follows: +Clone the sodar-docker-compose repository as follows: .. code-block:: bash @@ -141,7 +147,7 @@ settings. Note that in the Docker Compose environment, settings specific to the SODAR web server are prefixed with ``SODAR_*``. This does not include e.g. iRODS settings commonly used by multiple components. -For more information on the iRODS settings, see the +For more information on iRODS settings, see the `iRODS documentation `_. Note that for certain settings to take effect, you need to run the Docker @@ -160,12 +166,12 @@ following command: $ ./run.sh If you have the need to modify the default configuration, you can alternatively -launch the network with the ``docker-compose up`` command with appropriate +launch the network with the ``docker compose up`` command with appropriate parameters: .. code-block:: bash - $ docker-compose -f docker-compose.yml \ + $ docker compose -f docker-compose.yml \ -f docker-compose.override.yml.irods \ -f docker-compose.override.yml.davrods \ -f docker-compose.override.yml.provided-cert \ @@ -237,29 +243,46 @@ restart. If the network is running in the background, enter the following: .. code-block:: bash - $ docker-compose down && docker-compose up -d + $ docker compose down && docker compose up -d For updating all the images to their latest version, run the following: .. code-block:: bash - $ docker-compose pull + $ docker compose pull To only update a specific image, you can do the following: .. code-block:: bash - $ docker-compose pull IMAGE-NAME - $ docker-compose up -d --no-deps --build IMAGE-NAME + $ docker compose pull IMAGE-NAME + $ docker compose up -d --no-deps --build IMAGE-NAME Whenever updating your SODAR environment, it is strongly recommend to ensure -your ``sodar-docker-compose`` repository is up-to-date with the latest version -with the following command: +your sodar-docker-compose repository is up-to-date with the latest version with +the following command: .. code-block:: bash $ git pull origin main +The ``main`` branch of the repository will contain the latest release version of +the environment, which corresponds to the latest SODAR server release. If you +want to deploy an older version of SODAR than the most recent release, check out +the corresponding sodar-docker-compose release according to its git tag. + +If you are installing the "bleeding edge" development version of SODAR ``dev`` +with the ``dev-0`` tag, you should correspondingly use the ``dev`` branch +version of sodar-docker-compose. Note that these versions may contain breaking +changes and/or not yet fully documented features. Deploying the dev version in +production is generally recommended only for experienced SODAR admins. + +.. note:: + + SODAR v1.0 will be upgraded to use iRODS 4.3 and Postgres v16. This version + may require special steps for upgrading an existing environment. Make sure + to refer to the sodar-docker-compose README for instructions. + .. _admin_install_advanced_config: @@ -313,7 +336,11 @@ command. 
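Referring back to the version guidance above, a minimal sketch of matching the sodar-docker-compose checkout to your SODAR version (the tag name is a placeholder; list available tags with ``git tag``):

.. code-block:: bash

    # Deploy a released SODAR version: check out the matching release tag
    $ git checkout <release-tag>

    # Deploy the bleeding edge development version: track the dev branch
    $ git checkout dev
    $ git pull origin dev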
Deploying in Production ======================= -This section details issues specific to deploying SODAR in production. +Deploying the SODAR environment in production is mostly considered out of scope +for this documentation, as the exact method of deployment depends on your +organization's infrastructure and practices. The sodar-docker-compose +environment can be set up for production using e.g. Ansible playbooks. In this +section we present certain guidelines and recommendations for deployment. Production Prerequisites ------------------------ @@ -321,8 +348,6 @@ Production Prerequisites In addition to the :ref:`general prerequisites `, we recommend the following for a production deployment of SODAR: -**TODO:** Update these - - Recommended Hardware - Memory: 64 GB of RAM - CPU: 16 cores diff --git a/docs_manual/source/dev_install.rst b/docs_manual/source/dev_install.rst index dd750d2c..0915ad1f 100644 --- a/docs_manual/source/dev_install.rst +++ b/docs_manual/source/dev_install.rst @@ -111,7 +111,7 @@ use OpenSSL as demonstrated below. Ensure the file is placed under ---------------------------- Copy the file ``env.example.dev`` into ``.env`` to use the default -``sodar-docker-compose`` configuration for development. +sodar-docker-compose configuration for development. .. code-block:: bash @@ -121,7 +121,7 @@ In the case of the development setup, this environment only includes variables available to the external SODAR components. The ``sodar-server`` settings will be set up in a local ``.env`` file we will describe further on in this document. -5. Bring up the Environment +5. Bring Up the Environment --------------------------- To run the environment in the development configuration, use the following @@ -141,7 +141,7 @@ SODAR Server Setup With the required external components running in Docker, you can set up and run the SODAR Django server and other local components. -1. Set up the Repository +1. Set Up the Repository ------------------------ First, clone the ``sodar-server`` repository and install the OS dependencies diff --git a/docs_manual/source/sodar_release_notes.rst b/docs_manual/source/sodar_release_notes.rst index 3f191bf1..83599b9e 100644 --- a/docs_manual/source/sodar_release_notes.rst +++ b/docs_manual/source/sodar_release_notes.rst @@ -9,12 +9,13 @@ list of changes in current and previous releases, see the v0.14.2 (WIP) -============ +============= Release for minor updates, maintenance and bug fixes. - Fix iRODS data request issues - Fix LDAP TLS settings +- Minor updates and bug fixes v0.14.1 (2023-12-12) From 3a812fd0ccc42f6f715e3d327a4d559a8d318d74 Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Mon, 29 Jan 2024 16:19:41 +0100 Subject: [PATCH 08/33] add zone assay shortcut collection creation (#1869) --- CHANGELOG.rst | 6 +++ docs_manual/source/sodar_release_notes.rst | 1 + landingzones/tests/test_views_taskflow.py | 43 +++++++++++++++++++--- landingzones/views.py | 17 +++++++-- 4 files changed, 58 insertions(+), 9 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 5af1fa6d..3272dff6 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -8,6 +8,12 @@ Changelog for the SODAR project. 
Loosely follows the Unreleased ========== +Added +----- + +- **Landingzones** + - Create assay plugin shortcut collections for zones (#1869) + Changed ------- diff --git a/docs_manual/source/sodar_release_notes.rst b/docs_manual/source/sodar_release_notes.rst index 83599b9e..38a8b6e4 100644 --- a/docs_manual/source/sodar_release_notes.rst +++ b/docs_manual/source/sodar_release_notes.rst @@ -13,6 +13,7 @@ v0.14.2 (WIP) Release for minor updates, maintenance and bug fixes. +- Add assay plugin shortcut collection creation in landing zones - Fix iRODS data request issues - Fix LDAP TLS settings - Minor updates and bug fixes diff --git a/landingzones/tests/test_views_taskflow.py b/landingzones/tests/test_views_taskflow.py index 14562bdc..0826c638 100644 --- a/landingzones/tests/test_views_taskflow.py +++ b/landingzones/tests/test_views_taskflow.py @@ -67,6 +67,8 @@ ZONE_BASE_COLLS = [MISC_FILES_COLL, RESULTS_COLL, TRACK_HUBS_COLL] ZONE_PLUGIN_COLLS = ['0815-N1-DNA1', '0815-T1-DNA1'] ZONE_ALL_COLLS = ZONE_BASE_COLLS + ZONE_PLUGIN_COLLS +RAW_DATA_COLL = 'RawData' +MAX_QUANT_COLL = 'MaxQuantResults' class LandingZoneTaskflowMixin: @@ -239,8 +241,7 @@ def test_create_zone_colls(self): 'configuration': '', } with self.login(self.user): - response = self.client.post(self.url, values) - self.assertRedirects(response, self.redirect_url) + self.client.post(self.url, values) self.assert_zone_count(1) zone = LandingZone.objects.first() @@ -287,8 +288,7 @@ def test_create_zone_colls_plugin(self): 'configuration': '', } with self.login(self.user): - response = self.client.post(self.url, values) - self.assertRedirects(response, self.redirect_url) + self.client.post(self.url, values) self.assert_zone_count(1) zone = LandingZone.objects.first() @@ -304,6 +304,38 @@ def test_create_zone_colls_plugin(self): self.assert_irods_access( self.user.username, os.path.join(zone_path, c), IRODS_ACCESS_OWN ) + # These should not be created for this plugin + for c in [MAX_QUANT_COLL, RAW_DATA_COLL]: + self.assert_irods_coll(zone, c, False) + + def test_create_zone_colls_plugin_shortcuts(self): + """Test landingzones creation with shortcut collections in plugin""" + self.assertEqual(LandingZone.objects.count(), 0) + # Set pep_ms plugin + self.assay.measurement_type = {'name': 'protein expression profiling'} + self.assay.technology_type = {'name': 'mass spectrometry'} + self.assay.save() + # NOTE: update_cache() not implemented in this plugin + + values = { + 'assay': str(self.assay.sodar_uuid), + 'title_suffix': ZONE_SUFFIX, + 'description': ZONE_DESC, + 'create_colls': True, + 'restrict_colls': False, + 'configuration': '', + } + with self.login(self.user): + self.client.post(self.url, values) + + self.assert_zone_count(1) + zone = LandingZone.objects.first() + self.assert_zone_status(zone, ZONE_STATUS_ACTIVE) + zone_colls = ZONE_BASE_COLLS + [RAW_DATA_COLL, MAX_QUANT_COLL] + for c in zone_colls: + self.assert_irods_coll(zone, c, True) + for c in ZONE_PLUGIN_COLLS: + self.assert_irods_coll(zone, c, False) # TODO: Test without sodarcache (see issue #1157) @@ -332,8 +364,7 @@ def test_create_zone_colls_plugin_restrict(self): 'configuration': '', } with self.login(self.user): - response = self.client.post(self.url, values) - self.assertRedirects(response, self.redirect_url) + self.client.post(self.url, values) self.assert_zone_count(1) zone = LandingZone.objects.first() diff --git a/landingzones/views.py b/landingzones/views.py index 0421f19c..53356ed5 100644 --- a/landingzones/views.py +++ b/landingzones/views.py @@ -187,6 
+187,7 @@ def submit_create( colls = [] if create_colls: logger.debug('Creating default landing zone collections..') + assay_path = irods_backend.get_path(zone.assay) colls = [RESULTS_COLL, MISC_FILES_COLL, TRACK_HUBS_COLL] plugin = zone.assay.get_plugin() # First try the cache @@ -199,14 +200,24 @@ def submit_create( project=zone.project, ) if cache_obj and cache_obj.data: - assay_path = irods_backend.get_path(zone.assay) colls += [ p.replace(assay_path + '/', '') for p in cache_obj.data['paths'].keys() ] logger.debug('Retrieved collections from cache') - elif plugin: - pass # TODO: Build tables, get rows directly from plugin? + # TODO: If no cache, build tables and get rows directly from plugin? + # Add shortcut paths + if plugin: + shortcuts = plugin.get_shortcuts(zone.assay) or [] + for s in shortcuts: + path = s.get('path') + if path: + path = path.replace(assay_path + '/', '') + if path and path not in colls: + colls.append(path) + logger.debug( + 'Added shorctut collection "{}"'.format(s.get('id')) + ) logger.debug('Collections to be created: {}'.format(', '.join(colls))) flow_name = 'landing_zone_create' From 6be73da3e8e66b18c6b5a46fd064aa4669e5d274 Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Thu, 1 Feb 2024 16:36:02 +0100 Subject: [PATCH 09/33] fix protocol ref editability for new rows (#1875) --- CHANGELOG.rst | 1 + samplesheets/vueapp/src/App.vue | 9 ++++----- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 3272dff6..d05a5aeb 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -36,6 +36,7 @@ Fixed - Superuser not allowed to edit iRODS request from other users in UI (#1863) - ``IrodsDataRequest`` user changed on object update (#1864) - ``IrodsDataRequest._validate_action()`` failing with ``delete`` action (#1858) + - Protocol ref editable for new row if disabled in column config (#1875) Removed ------- diff --git a/samplesheets/vueapp/src/App.vue b/samplesheets/vueapp/src/App.vue index ce49b0a2..039418c3 100644 --- a/samplesheets/vueapp/src/App.vue +++ b/samplesheets/vueapp/src/App.vue @@ -759,7 +759,6 @@ export default { let enableNextIdx = null // If the next node is a material, enable editing its name - // Else if it's a process, enable editing for all cells (if available) if (nextNodeCls === 'GenericMaterial') { const itemType = cols[startIdx].colDef.cellEditorParams.headerInfo.item_type const headerType = cols[startIdx].colDef.cellEditorParams.headerInfo.header_type @@ -810,7 +809,7 @@ export default { rowNode.setDataValue(cols[i].colId, value) } } - } else if (nextNodeCls === 'Process') { + } else if (nextNodeCls === 'Process') { // Handle process node enabling let i = startIdx let processActive = false let newInit = true @@ -821,9 +820,10 @@ export default { nextColId = cols[i].colId const value = this.getDefaultValue(nextColId, gridOptions, newInit, forceEmpty) const headerType = cols[i].colDef.cellEditorParams.headerInfo.header_type + const fieldEditable = cols[i].colDef.cellRendererParams.fieldEditable if (headerType === 'protocol') { - value.editable = true + value.editable = fieldEditable if ('uuid_ref' in value && value.uuid_ref) { processActive = true newInit = false @@ -842,13 +842,12 @@ export default { } else { // Only allow editing the rest of the cells if protocol is set if (processActive) { - value.editable = cols[i].colDef.cellRendererParams.fieldEditable + value.editable = fieldEditable } else value.editable = false } rowNode.setDataValue(nextColId, value) i += 1 } - // If default protocol or name was 
filled, enable the next node(s) too if (processActive) enableNextIdx = i } From a3409febb0aad015a1a7a536adaa353571795b15 Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Mon, 5 Feb 2024 17:28:39 +0100 Subject: [PATCH 10/33] update irods object list retrieval (#1882, #1883) --- CHANGELOG.rst | 7 ++ irodsbackend/api.py | 62 ++++++----------- irodsbackend/tests/test_api_taskflow.py | 77 ++++++---------------- irodsbackend/views.py | 25 +++++-- samplesheets/plugins.py | 4 +- samplesheets/studyapps/cancer/utils.py | 2 +- samplesheets/studyapps/germline/plugins.py | 6 +- samplesheets/studyapps/germline/utils.py | 2 +- samplesheets/views_ajax.py | 16 ++--- samplesheets/views_api.py | 4 +- taskflowbackend/flows/landing_zone_move.py | 45 +++++-------- taskflowbackend/irods_utils.py | 22 ------- taskflowbackend/tests/base.py | 5 +- 13 files changed, 102 insertions(+), 175 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index d05a5aeb..2d11c62d 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -20,9 +20,14 @@ Changed - **General** - Upgrade critical Vue app dependencies (#1854) - Update installation documentation (#1871) +- **Irodsbackend** + - Reduce redundant object queries (#1883) + - Change method logic in ``get_objects()`` and ``get_objs_recursively()`` (#1883) - **Samplesheets** - Improve Django messages for ``IrodsDataRequest`` exceptions (#1858) - Change ``IrodsDataRequest`` description if created in Ajax view (#1862) +- **Taskflowbackend** + - Optimize ``landing_zone_move`` iRODS path retrieval (#1882) Fixed ----- @@ -43,6 +48,8 @@ Removed - **General** - LDAP settings ``OPT_X_TLS_REQUIRE_CERT`` workaround (#1853) +- **Taskflowbackend** + - ``get_subcoll_obj_paths()`` and ``get_subcoll_paths()`` helpers (#1882) v0.14.1 (2023-12-12) diff --git a/irodsbackend/api.py b/irodsbackend/api.py index 34ebb994..f8d53113 100644 --- a/irodsbackend/api.py +++ b/irodsbackend/api.py @@ -582,7 +582,7 @@ def get_colls_recursively(cls, coll): return [iRODSCollection(coll.manager, row) for row in query] def get_objs_recursively( - self, irods, coll, md5=False, name_like=None, limit=None + self, irods, coll, include_md5=False, name_like=None, limit=None ): """ Return objects below a coll recursively. Replacement for the @@ -591,13 +591,13 @@ def get_objs_recursively( :param irods: iRODSSession object :param coll: Collection object - :param md5: if True, return .md5 files, otherwise anything but them + :param include_md5: if True, include .md5 files :param name_like: Filtering of file names (string or list of strings) - :param limit: Limit search to n rows (int) + :param limit: Limit retrieval to n rows (int) :return: List """ ret = [] - md5_filter = 'LIKE' if md5 else 'NOT LIKE' + md5_filter = '' if include_md5 else 'AND data_name NOT LIKE \'%.md5\'' path_lookup = [] q_count = 1 @@ -607,8 +607,7 @@ def _do_query(irods, nl=None): 'r_data_main.modify_ts as modify_ts, coll_name ' 'FROM r_data_main JOIN r_coll_main USING (coll_id) ' 'WHERE (coll_name = \'{coll_path}\' ' - 'OR coll_name LIKE \'{coll_path}/%\') ' - 'AND data_name {md5_filter} \'%.md5\''.format( + 'OR coll_name LIKE \'{coll_path}/%\') {md5_filter}'.format( coll_path=coll.path, md5_filter=md5_filter ) ) @@ -621,7 +620,8 @@ def _do_query(irods, nl=None): sql += ' OR ' sql += 'data_name LIKE \'%{}%\''.format(n) sql += ')' - if not md5 and limit: + # TODO: Shouldn't we also allow limit if including .md5 files? 
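# For reference, a sketch of how the md5_filter above behaves (derived from the
# surrounding code, not an additional change): with include_md5=False the WHERE
# clause gains "AND data_name NOT LIKE '%.md5'", so checksum files are dropped by
# the SQL query itself; with include_md5=True no extra filter is added and .md5
# files come back as regular object rows, which callers such as
# IrodsObjectListAjaxView (further below) pair with their data objects to set the
# md5_file flag.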
+ if not include_md5 and limit: sql += ' LIMIT {}'.format(limit) # logger.debug('Object list query = "{}"'.format(sql)) @@ -680,21 +680,21 @@ def get_objects( self, irods, path, - check_md5=False, + include_md5=False, include_colls=False, name_like=None, limit=None, ): """ - Return an iRODS object list. + Return a flat iRODS object list recursively under a given path. :param irods: iRODSSession object :param path: Full path to iRODS collection - :param check_md5: Whether to add md5 checksum file info (bool) + :param include_md5: Include .md5 checksum files (bool) :param include_colls: Include collections (bool) :param name_like: Filtering of file names (string or list of strings) :param limit: Limit search to n rows (int) - :return: Dict + :return: List :raise: FileNotFoundError if collection is not found """ try: @@ -706,43 +706,21 @@ def get_objects( if not isinstance(name_like, list): name_like = [name_like] name_like = [n.replace('_', '\_') for n in name_like] # noqa - ret = {'irods_data': []} - md5_paths = None - - data_objs = self.get_objs_recursively( - irods, coll, name_like=name_like, limit=limit + ret = self.get_objs_recursively( + irods, + coll, + include_md5=include_md5, + name_like=name_like, + limit=limit, ) - if data_objs and check_md5: - md5_paths = [ - o['path'] - for o in self.get_objs_recursively( - irods, coll, md5=True, name_like=name_like - ) - ] - - for o in data_objs: - if check_md5: - if o['path'] + '.md5' in md5_paths: - o['md5_file'] = True - else: - o['md5_file'] = False - ret['irods_data'].append(o) # Add collections if enabled - # TODO: Combine into a single query? + # TODO: Combine into a single query? (see issues #1440, #1883) if include_colls: colls = self.get_colls_recursively(coll) for c in colls: - ret['irods_data'].append( - { - 'name': c.name, - 'type': 'coll', - 'path': c.path, - } - ) - ret['irods_data'] = sorted( - ret['irods_data'], key=lambda x: x['path'] - ) + ret.append({'name': c.name, 'type': 'coll', 'path': c.path}) + ret = sorted(ret, key=lambda x: x['path']) return ret @classmethod diff --git a/irodsbackend/tests/test_api_taskflow.py b/irodsbackend/tests/test_api_taskflow.py index 3282796f..4a138f0d 100644 --- a/irodsbackend/tests/test_api_taskflow.py +++ b/irodsbackend/tests/test_api_taskflow.py @@ -103,18 +103,17 @@ def test_get_objects(self): self.irods.data_objects.create(path + '/' + TEST_FILE_NAME) self.irods.data_objects.create(path + '/{}.md5'.format(TEST_FILE_NAME)) obj_list = self.irods_backend.get_objects( - self.irods, path, check_md5=True + self.irods, path, include_md5=True ) self.assertIsNotNone(obj_list) - self.assertEqual(len(obj_list['irods_data']), 1) # md5 not listed + self.assertEqual(len(obj_list), 2) - obj = obj_list['irods_data'][0] + obj = obj_list[0] expected = { 'name': TEST_FILE_NAME, 'type': 'obj', 'path': path + '/' + TEST_FILE_NAME, 'size': 0, - 'md5_file': True, 'modify_time': obj['modify_time'], } self.assertEqual(obj, expected) @@ -127,10 +126,10 @@ def test_get_objects_with_colls(self): self.irods.data_objects.create(path + '/{}.md5'.format(TEST_FILE_NAME)) self.irods.collections.create(path + '/subcoll') obj_list = self.irods_backend.get_objects( - self.irods, path, include_colls=True, check_md5=True + self.irods, path, include_md5=True, include_colls=True ) self.assertIsNotNone(obj_list) - self.assertEqual(len(obj_list['irods_data']), 2) # md5 not listed + self.assertEqual(len(obj_list), 3) expected = [ { @@ -143,11 +142,17 @@ def test_get_objects_with_colls(self): 'type': 'obj', 'path': path + '/' + 
TEST_FILE_NAME, 'size': 0, - 'md5_file': True, - 'modify_time': obj_list['irods_data'][1]['modify_time'], + 'modify_time': obj_list[1]['modify_time'], + }, + { + 'name': TEST_FILE_NAME + '.md5', + 'type': 'obj', + 'path': path + '/' + TEST_FILE_NAME + '.md5', + 'size': 0, + 'modify_time': obj_list[2]['modify_time'], }, ] - self.assertEqual(obj_list['irods_data'], expected) + self.assertEqual(obj_list, expected) def test_get_objects_multi(self): """Test get_objects() with multiple search terms""" @@ -161,30 +166,10 @@ def test_get_objects_multi(self): self.irods, path, name_like=[TEST_FILE_NAME, TEST_FILE_NAME2], - check_md5=True, + include_md5=True, ) self.assertIsNotNone(obj_list) - self.assertEqual(len(obj_list['irods_data']), 2) # md5 not listed - - expected = [ - { - 'name': TEST_FILE_NAME, - 'type': 'obj', - 'path': path + '/' + TEST_FILE_NAME, - 'size': 0, - 'md5_file': True, - 'modify_time': obj_list['irods_data'][0]['modify_time'], - }, - { - 'name': TEST_FILE_NAME2, - 'type': 'obj', - 'path': path + '/' + TEST_FILE_NAME2, - 'size': 0, - 'md5_file': True, - 'modify_time': obj_list['irods_data'][1]['modify_time'], - }, - ] - self.assertEqual(obj_list['irods_data'], expected) + self.assertEqual(len(obj_list), 4) def test_get_objects_long_query(self): """Test get_objects() with a long query""" @@ -212,30 +197,10 @@ def test_get_objects_long_query(self): self.irods, path, name_like=[TEST_FILE_NAME, TEST_FILE_NAME2], - check_md5=True, + include_md5=True, ) self.assertIsNotNone(obj_list) - self.assertEqual(len(obj_list['irods_data']), 2) # md5 not listed - - expected = [ - { - 'name': TEST_FILE_NAME, - 'type': 'obj', - 'path': path + '/' + TEST_FILE_NAME, - 'size': 0, - 'md5_file': True, - 'modify_time': obj_list['irods_data'][0]['modify_time'], - }, - { - 'name': TEST_FILE_NAME2, - 'type': 'obj', - 'path': path + '/' + TEST_FILE_NAME2, - 'size': 0, - 'md5_file': True, - 'modify_time': obj_list['irods_data'][1]['modify_time'], - }, - ] - self.assertEqual(obj_list['irods_data'], expected) + self.assertEqual(len(obj_list), 4) def test_get_objects_empty_coll(self): """Test get_objects() with an empty sample collection""" @@ -243,7 +208,7 @@ def test_get_objects_empty_coll(self): path = self.irods_backend.get_path(self.project) + '/' + SAMPLE_COLL obj_list = self.irods_backend.get_objects(self.irods, path) self.assertIsNotNone(obj_list) - self.assertEqual(len(obj_list['irods_data']), 0) + self.assertEqual(len(obj_list), 0) def test_get_objects_no_coll(self): """Test get_objects() with no created collections""" @@ -258,10 +223,10 @@ def test_get_objects_limit(self): self.irods.data_objects.create(path + '/' + TEST_FILE_NAME) self.irods.data_objects.create(path + '/' + TEST_FILE_NAME2) obj_list = self.irods_backend.get_objects( - self.irods, path, check_md5=False, limit=1 + self.irods, path, include_md5=False, limit=1 ) self.assertIsNotNone(obj_list) - self.assertEqual(len(obj_list['irods_data']), 1) # Limited to 1 + self.assertEqual(len(obj_list), 1) # Limited to 1 def test_issue_ticket(self): """Test issue_ticket()""" diff --git a/irodsbackend/views.py b/irodsbackend/views.py index a6bd5e5c..cc5f877b 100644 --- a/irodsbackend/views.py +++ b/irodsbackend/views.py @@ -190,18 +190,31 @@ class IrodsObjectListAjaxView(BaseIrodsAjaxView): permission_required = 'irodsbackend.view_files' def get(self, request, *args, **kwargs): - md5 = request.GET.get('md5') - colls = request.GET.get('colls') + check_md5 = bool(int(request.GET.get('md5'))) + include_colls = bool(int(request.GET.get('colls'))) # Get 
files try: with self.irods_backend.get_session() as irods: - ret_data = self.irods_backend.get_objects( + objs = self.irods_backend.get_objects( irods, self.path, - check_md5=bool(int(md5)), - include_colls=bool(int(colls)), + include_md5=check_md5, + include_colls=include_colls, ) - return Response(ret_data, status=200) + ret = [] + md5_paths = [] + if check_md5: + md5_paths = [ + o['path'] for o in objs if o['path'].endswith('.md5') + ] + for o in objs: + if o['type'] == 'coll' and include_colls: + ret.append(o) + elif o['type'] == 'obj' and not o['path'].endswith('.md5'): + if check_md5: + o['md5_file'] = o['path'] + '.md5' in md5_paths + ret.append(o) + return Response({'irods_data': ret}, status=200) except Exception as ex: return Response(self._get_detail(ex), status=500) diff --git a/samplesheets/plugins.py b/samplesheets/plugins.py index 9d81bf58..d4959049 100644 --- a/samplesheets/plugins.py +++ b/samplesheets/plugins.py @@ -360,7 +360,7 @@ def _get_search_files(cls, search_terms, user, irods_backend): ret = [] try: with irods_backend.get_session() as irods: - obj_data = irods_backend.get_objects( + obj_list = irods_backend.get_objects( irods=irods, path=irods_backend.get_projects_path(), name_like=search_terms, @@ -386,7 +386,7 @@ def _get_search_files(cls, search_terms, user, irods_backend): for a in Assay.objects.filter(study__in=studies.values()) } - for o in obj_data['irods_data']: + for o in obj_list: project_uuid = irods_backend.get_uuid_from_path( o['path'], obj_type='project' ) diff --git a/samplesheets/studyapps/cancer/utils.py b/samplesheets/studyapps/cancer/utils.py index af161f84..048d2e96 100644 --- a/samplesheets/studyapps/cancer/utils.py +++ b/samplesheets/studyapps/cancer/utils.py @@ -28,7 +28,7 @@ def get_library_file_path(assay, library_name, file_type, irods_backend, irods): override = get_igv_omit_override(assay.get_project(), file_type) try: obj_list = irods_backend.get_objects(irods, query_path) - for obj in obj_list['irods_data']: + for obj in obj_list: if obj['name'].lower().endswith( FILE_TYPE_SUFFIXES[file_type] ) and check_igv_file_name(obj['name'], file_type, override): diff --git a/samplesheets/studyapps/germline/plugins.py b/samplesheets/studyapps/germline/plugins.py index 0655a3ab..d2684f21 100644 --- a/samplesheets/studyapps/germline/plugins.py +++ b/samplesheets/studyapps/germline/plugins.py @@ -310,7 +310,7 @@ def _update_study_cache(cls, study, user, cache_backend, irods_backend): study_objs = irods_backend.get_objects( irods, irods_backend.get_path(study) ) - obj_len = len(study_objs['irods_data']) + obj_len = len(study_objs) logger.debug( 'Query returned {} data object{}'.format( obj_len, 's' if obj_len != 1 else '' @@ -349,7 +349,7 @@ def _update_study_cache(cls, study, user, cache_backend, irods_backend): if obj_len > 0 and path not in bam_paths[source_name]: bam_paths[source_name] += [ o['path'] - for o in study_objs['irods_data'] + for o in study_objs if o['path'].startswith(path + '/') and o['name'].lower().endswith('bam') and check_igv_file_name(o['name'], 'bam', bam_override) @@ -365,7 +365,7 @@ def _update_study_cache(cls, study, user, cache_backend, irods_backend): if obj_len > 0 and path not in vcf_paths[vcf_query_id]: vcf_paths[vcf_query_id] += [ o['path'] - for o in study_objs['irods_data'] + for o in study_objs if o['path'].startswith(path + '/') and o['name'].lower().endswith('vcf.gz') and check_igv_file_name(o['name'], 'vcf', vcf_override) diff --git a/samplesheets/studyapps/germline/utils.py 
b/samplesheets/studyapps/germline/utils.py index b1563fb5..3038488e 100644 --- a/samplesheets/studyapps/germline/utils.py +++ b/samplesheets/studyapps/germline/utils.py @@ -81,7 +81,7 @@ def _get_val_by_index(row, idx): obj_list = None if obj_list: for query_path in query_paths: - for obj in obj_list['irods_data']: + for obj in obj_list: if ( obj['path'].startswith(query_path + '/') and obj['name'] diff --git a/samplesheets/views_ajax.py b/samplesheets/views_ajax.py index 29e806d2..066a553f 100644 --- a/samplesheets/views_ajax.py +++ b/samplesheets/views_ajax.py @@ -1942,18 +1942,18 @@ def get(self, request, *args, **kwargs): # Get files try: with irods_backend.get_session() as irods: - ret_data = irods_backend.get_objects(irods, self.path) + obj_list = irods_backend.get_objects(irods, self.path) except Exception as ex: return Response({'detail': str(ex)}, status=400) - for data_obj in ret_data.get('irods_data', []): - obj = IrodsDataRequest.objects.filter( - path=data_obj['path'], status__in=['ACTIVE', 'FAILED'] + for o in obj_list: + db_obj = IrodsDataRequest.objects.filter( + path=o['path'], status__in=['ACTIVE', 'FAILED'] ).first() - data_obj['irods_request_status'] = obj.status if obj else None - data_obj['irods_request_user'] = ( - str(obj.user.sodar_uuid) if obj else None + o['irods_request_status'] = db_obj.status if db_obj else None + o['irods_request_user'] = ( + str(db_obj.user.sodar_uuid) if db_obj else None ) - return Response(ret_data, status=200) + return Response({'irods_data': obj_list}, status=200) class SheetVersionCompareAjaxView(SODARBaseProjectAjaxView): diff --git a/samplesheets/views_api.py b/samplesheets/views_api.py index 88498c8d..0f7537d2 100644 --- a/samplesheets/views_api.py +++ b/samplesheets/views_api.py @@ -838,13 +838,13 @@ def get(self, request, *args, **kwargs): path = irods_backend.get_sample_path(project) try: with irods_backend.get_session() as irods: - irods_data = irods_backend.get_objects(irods, path) + obj_list = irods_backend.get_objects(irods, path) except Exception as ex: return Response( {'detail': '{}: {}'.format(IRODS_QUERY_ERROR_MSG, ex)}, status=status.HTTP_404_NOT_FOUND, ) - return Response(irods_data, status=status.HTTP_200_OK) + return Response({'irods_data': obj_list}, status=status.HTTP_200_OK) # TODO: Temporary HACK, should be replaced by proper API view diff --git a/taskflowbackend/flows/landing_zone_move.py b/taskflowbackend/flows/landing_zone_move.py index 217266d5..86ed3d5a 100644 --- a/taskflowbackend/flows/landing_zone_move.py +++ b/taskflowbackend/flows/landing_zone_move.py @@ -12,10 +12,6 @@ from landingzones.models import LandingZone from taskflowbackend.flows.base_flow import BaseLinearFlow -from taskflowbackend.irods_utils import ( - get_subcoll_obj_paths, - get_subcoll_paths, -) from taskflowbackend.tasks import irods_tasks @@ -37,7 +33,6 @@ def validate(self): return super().validate() def build(self, force_fail=False): - # Setup validate_only = self.flow_data.get('validate_only', False) zone = LandingZone.objects.get(sodar_uuid=self.flow_data['zone_uuid']) project_group = self.irods_backend.get_user_group_name(self.project) @@ -54,39 +49,31 @@ def build(self, force_fail=False): ), ) - # Get landing zone file paths (without .md5 files) from iRODS - zone_coll = self.irods.collections.get(zone_path) - zone_objects = get_subcoll_obj_paths(zone_coll) - zone_objects_nomd5 = list( - set( - [ - p - for p in zone_objects - if p[p.rfind('.') + 1 :].lower() != 'md5' - ] - ) - ) - zone_objects_md5 = list( - set([p for p in 
zone_objects if p not in zone_objects_nomd5]) + # Get landing zone data object and collection paths from iRODS + zone_all = self.irods_backend.get_objects( + self.irods, zone_path, include_md5=True, include_colls=True ) + zone_objects = [o['path'] for o in zone_all if o['type'] == 'obj'] + zone_objects_nomd5 = [ + p for p in zone_objects if p[p.rfind('.') + 1 :].lower() != 'md5' + ] + zone_objects_md5 = [ + p for p in zone_objects if p not in zone_objects_nomd5 + ] file_count = len(zone_objects_nomd5) file_count_msg_plural = 's' if file_count != 1 else '' # Get all collections with root path zone_all_colls = [zone_path] - zone_all_colls += get_subcoll_paths(zone_coll) + zone_all_colls += [o['path'] for o in zone_all if o['type'] == 'coll'] # Get list of collections containing files (ignore empty colls) zone_object_colls = list(set([p[: p.rfind('/')] for p in zone_objects])) # Convert these to collections inside sample collection - sample_colls = list( - set( - [ - sample_path + '/' + '/'.join(p.split('/')[10:]) - for p in zone_object_colls - if len(p.split('/')) > 10 - ] - ) - ) + sample_colls = [ + sample_path + '/' + '/'.join(p.split('/')[10:]) + for p in zone_object_colls + if len(p.split('/')) > 10 + ] # print('sample_path: {}'.format(sample_path)) # DEBUG # print('zone_objects: {}'.format(zone_objects)) # DEBUG diff --git a/taskflowbackend/irods_utils.py b/taskflowbackend/irods_utils.py index bf26b081..58921af1 100644 --- a/taskflowbackend/irods_utils.py +++ b/taskflowbackend/irods_utils.py @@ -1,28 +1,6 @@ """iRODS utilities for the taskflowbackend app""" -def get_subcoll_obj_paths(coll): - """ - Return paths to all files within collection and its subcollections - recursively. - """ - ret = [] - for sub_coll in coll.subcollections: - ret += get_subcoll_obj_paths(sub_coll) - for data_obj in coll.data_objects: - ret.append(data_obj.path) - return ret - - -def get_subcoll_paths(coll): - """Return paths to all subcollections within collection recursively""" - ret = [] - for sub_coll in coll.subcollections: - ret.append(sub_coll.path) - ret += get_subcoll_paths(sub_coll) - return ret - - def get_batch_role(project, user_name): """ Return role dict for use with e.g. the role_update_irods_batch flow. 
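Note on the refactor above: with ``get_subcoll_obj_paths()`` and ``get_subcoll_paths()`` removed, callers derive the same path lists from the flat result of ``irods_backend.get_objects()``. The following is a minimal illustrative sketch, not part of this patch, assuming the ``irods``, ``irods_backend`` and ``zone_path`` objects from the ``landing_zone_move`` flow shown above:

    # Sketch only: one flat listing replaces the recursive helpers.
    # Assumes irods_backend, irods and zone_path as in landing_zone_move.
    zone_all = irods_backend.get_objects(
        irods, zone_path, include_md5=True, include_colls=True
    )
    # Data object paths (previously get_subcoll_obj_paths())
    obj_paths = [o['path'] for o in zone_all if o['type'] == 'obj']
    # Subcollection paths (previously get_subcoll_paths())
    coll_paths = [o['path'] for o in zone_all if o['type'] == 'coll']
    # Separate checksum files from payload files, as done in the flow
    md5_paths = [p for p in obj_paths if p.lower().endswith('.md5')]
    payload_paths = [p for p in obj_paths if p not in md5_paths]

Each returned entry is a dict with at least ``path`` and ``type`` keys (``'obj'`` or ``'coll'``), so filtering by type replaces the recursive collection walks in a single query.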
diff --git a/taskflowbackend/tests/base.py b/taskflowbackend/tests/base.py index 16eee450..e8ca67a0 100644 --- a/taskflowbackend/tests/base.py +++ b/taskflowbackend/tests/base.py @@ -236,9 +236,8 @@ def clear_irods_test_data(cls): ) obj_paths = [ o['path'] - for o in irods_backend.get_objs_recursively(irods, trash_coll) - + irods_backend.get_objs_recursively( - irods, trash_coll, md5=True + for o in irods_backend.get_objs_recursively( + irods, trash_coll, include_md5=True ) ] for path in obj_paths: From 239f51cc2d5603ca7898733046b1d281ae251c76 Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Tue, 6 Feb 2024 17:36:18 +0100 Subject: [PATCH 11/33] fix IRODS_ROOT_PATH issues (#1888, #1889, #1890, #1891) --- CHANGELOG.rst | 7 ++ docs_manual/source/admin_settings.rst | 10 +- docs_manual/source/sodar_release_notes.rst | 1 + irodsbackend/api.py | 33 +++---- irodsbackend/tests/test_api.py | 24 ++++- taskflowbackend/flows/landing_zone_move.py | 7 +- taskflowbackend/tests/base.py | 23 +++-- taskflowbackend/tests/test_flows.py | 110 +++++++++++++++++++++ 8 files changed, 185 insertions(+), 30 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 2d11c62d..04ac04d1 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -11,6 +11,8 @@ Unreleased Added ----- +- **Irodsbackend** + - Sanitize and validate ``IRODS_ROOT_PATH`` in ``get_root_path()`` (#1891) - **Landingzones** - Create assay plugin shortcut collections for zones (#1869) @@ -23,6 +25,7 @@ Changed - **Irodsbackend** - Reduce redundant object queries (#1883) - Change method logic in ``get_objects()`` and ``get_objs_recursively()`` (#1883) + - Use ``get_root_path()`` within ``IrodsAPI`` (#1890) - **Samplesheets** - Improve Django messages for ``IrodsDataRequest`` exceptions (#1858) - Change ``IrodsDataRequest`` description if created in Ajax view (#1862) @@ -34,6 +37,8 @@ Fixed - **General** - Invalid env var retrieval for ``AUTH_LDAP*_START_TLS`` (#1853) +- **Irodsbackend** + - Invalid path returned by ``get_path()`` if ``IRODS_ROOT_PATH`` is set (#1889) - **Landingzones** - Stats badge displayed to superusers for ``DELETED`` zones (#1866) - **Samplesheets** @@ -42,6 +47,8 @@ Fixed - ``IrodsDataRequest`` user changed on object update (#1864) - ``IrodsDataRequest._validate_action()`` failing with ``delete`` action (#1858) - Protocol ref editable for new row if disabled in column config (#1875) +- **Taskflowbackend** + - Hardcoded iRODS path length in ``landing_zone_move`` (#1888) Removed ------- diff --git a/docs_manual/source/admin_settings.rst b/docs_manual/source/admin_settings.rst index 5a34af63..ec32895a 100644 --- a/docs_manual/source/admin_settings.rst +++ b/docs_manual/source/admin_settings.rst @@ -85,7 +85,8 @@ iRODS Settings ``IRODS_ZONE`` iRODS zone (string). ``IRODS_ROOT_PATH`` - iRODS root path, if something else than ``/{IRODS_ZONE}/`` (string). + iRODS root path, without the zone. Used if the SODAR projects collection is + not intended to be placed directly under ``IRODS_ZONE`` (string). ``IRODS_USER`` Name of iRODS admin user to be used by backend processes (string). ``IRODS_PASS`` @@ -108,6 +109,13 @@ iRODS Settings Enable local basic auth endpoint for iRODS if an external LDAP/AD server is not used (boolean, default: ``False``). +.. warning:: + + Changing path settings such as ``IRODS_ROOT_PATH``, ``IRODS_SAMPLE_COLL`` + and ``IRODS_LANDING_ZONE_COLL`` is **not** recommended on an existing SODAR + installation. 
In that case, manual moving of existing iRODS collections is + required or links to iRODS will not work as expected. + Taskflow Backend Settings ------------------------- diff --git a/docs_manual/source/sodar_release_notes.rst b/docs_manual/source/sodar_release_notes.rst index 38a8b6e4..107adb81 100644 --- a/docs_manual/source/sodar_release_notes.rst +++ b/docs_manual/source/sodar_release_notes.rst @@ -15,6 +15,7 @@ Release for minor updates, maintenance and bug fixes. - Add assay plugin shortcut collection creation in landing zones - Fix iRODS data request issues +- Fix IRODS_ROOT_PATH issues - Fix LDAP TLS settings - Minor updates and bug fixes diff --git a/irodsbackend/api.py b/irodsbackend/api.py index f8d53113..0314f319 100644 --- a/irodsbackend/api.py +++ b/irodsbackend/api.py @@ -194,7 +194,8 @@ def sanitize_path(cls, path): """ Validate and sanitize iRODS path. - :param path: iRODS collection or data object path (string) + :param path: Full or partial iRODS path to collection or data object + (string) :raise: ValueError if iRODS path is invalid or unacceptable :return: Sanitized iRODS path (string) """ @@ -261,7 +262,6 @@ def get_path(cls, obj): obj_class, ', '.join(ACCEPTED_PATH_TYPES) ) ) - if obj_class == 'Project': project = obj else: @@ -270,10 +270,8 @@ def get_path(cls, obj): raise ValueError('Project not found for given object') # Base path (project) - rp = settings.IRODS_ROOT_PATH - path = '/{zone}/projects/{root_prefix}{uuid_prefix}/{uuid}'.format( - root_prefix=rp + '/' if rp else '', - zone=settings.IRODS_ZONE, + path = '{root_path}/projects/{uuid_prefix}/{uuid}'.format( + root_path=cls.get_root_path(), uuid_prefix=str(project.sodar_uuid)[:2], uuid=project.sodar_uuid, ) @@ -341,11 +339,16 @@ def get_zone_path(cls, project): @classmethod def get_root_path(cls): - """Return the root path for SODAR data""" - root_prefix = ( - '/' + settings.IRODS_ROOT_PATH if settings.IRODS_ROOT_PATH else '' - ) - return '/{}{}'.format(settings.IRODS_ZONE, root_prefix) + """Return the SODAR root path in iRODS""" + irods_zone = settings.IRODS_ZONE + root_path = '' + if settings.IRODS_ROOT_PATH: + root_path = cls.sanitize_path(settings.IRODS_ROOT_PATH) + if root_path.startswith('/' + irods_zone): + raise ValueError( + 'iRODS zone must not be included in IRODS_ROOT_PATH' + ) + return '/{}{}'.format(irods_zone, root_path) @classmethod def get_projects_path(cls): @@ -368,13 +371,9 @@ def get_uuid_from_path(cls, path, obj_type): :return: String or None :raise: ValueError if obj_type is not accepted """ - root_prefix = ( - settings.IRODS_ROOT_PATH + '/' if settings.IRODS_ROOT_PATH else '' - ) path_regex = { - 'project': '/{}/'.format(settings.IRODS_ZONE) - + root_prefix - + 'projects/[a-zA-Z0-9]{2}/(.+?)(?:/|$)', + 'project': cls.get_root_path() + + '/projects/[a-zA-Z0-9]{2}/(.+?)(?:/|$)', 'study': '/study_(.+?)(?:/|$)', 'assay': '/assay_(.+?)(?:/|$)', } diff --git a/irodsbackend/tests/test_api.py b/irodsbackend/tests/test_api.py index 01db8615..553da917 100644 --- a/irodsbackend/tests/test_api.py +++ b/irodsbackend/tests/test_api.py @@ -41,6 +41,7 @@ ZONE_DESC = 'description' IRODS_ZONE = settings.IRODS_ZONE IRODS_ROOT_PATH = 'sodar/root' +IRODS_ROOT_PATH_INVALID = 'sodar/../root' SAMPLE_COLL = settings.IRODS_SAMPLE_COLL LANDING_ZONE_COLL = settings.IRODS_LANDING_ZONE_COLL IRODS_ENV = { @@ -142,9 +143,9 @@ def test_get_path_project(self): self.assertEqual(expected, path) @override_settings(IRODS_ROOT_PATH=IRODS_ROOT_PATH) - def test_get_path_project_root_path(self): + def 
test_get_path_project_root(self): """Test get_irods_path() with Project object and root path""" - expected = '/{zone}/projects/{root_path}/{uuid_prefix}/{uuid}'.format( + expected = '/{zone}/{root_path}/projects/{uuid_prefix}/{uuid}'.format( zone=IRODS_ZONE, root_path=IRODS_ROOT_PATH, uuid_prefix=str(self.project.sodar_uuid)[:2], @@ -230,6 +231,24 @@ def test_get_root_path_with_path(self): expected = '/{}/{}'.format(IRODS_ZONE, IRODS_ROOT_PATH) self.assertEqual(self.irods_backend.get_root_path(), expected) + @override_settings(IRODS_ROOT_PATH=IRODS_ROOT_PATH + '/') + def test_get_root_path_trailing_slash(self): + """Test get_root_path() with root path setting and trailing slash""" + expected = '/{}/{}'.format(IRODS_ZONE, IRODS_ROOT_PATH) + self.assertEqual(self.irods_backend.get_root_path(), expected) + + @override_settings(IRODS_ROOT_PATH=IRODS_ROOT_PATH_INVALID) + def test_get_root_path_invalid(self): + """Test get_root_path() with invalid root path""" + with self.assertRaises(ValueError): + self.irods_backend.get_root_path() + + @override_settings(IRODS_ROOT_PATH=IRODS_ROOT_PATH_INVALID) + def test_get_root_path_with_zone(self): + """Test get_root_path() with value containing the zone""" + with self.assertRaises(ValueError): + self.irods_backend.get_root_path() + def test_get_projects_path(self): """Test get_projects_path() with default settings""" expected = '/{}/projects'.format(IRODS_ZONE) @@ -262,7 +281,6 @@ def test_get_uuid_from_path_project(self): def test_get_uuid_from_path_wrong_type(self): """Test get_uuid_from_path() with invalid type (should fail)""" path = self.irods_backend.get_path(self.study) - with self.assertRaises(ValueError): self.irods_backend.get_uuid_from_path(path, 'investigation') diff --git a/taskflowbackend/flows/landing_zone_move.py b/taskflowbackend/flows/landing_zone_move.py index 86ed3d5a..95aa5ee5 100644 --- a/taskflowbackend/flows/landing_zone_move.py +++ b/taskflowbackend/flows/landing_zone_move.py @@ -68,11 +68,12 @@ def build(self, force_fail=False): zone_all_colls += [o['path'] for o in zone_all if o['type'] == 'coll'] # Get list of collections containing files (ignore empty colls) zone_object_colls = list(set([p[: p.rfind('/')] for p in zone_objects])) - # Convert these to collections inside sample collection + # Convert paths to collections inside sample collection + zone_path_len = len(zone_path.split('/')) sample_colls = [ - sample_path + '/' + '/'.join(p.split('/')[10:]) + sample_path + '/' + '/'.join(p.split('/')[zone_path_len:]) for p in zone_object_colls - if len(p.split('/')) > 10 + if len(p.split('/')) > zone_path_len ] # print('sample_path: {}'.format(sample_path)) # DEBUG diff --git a/taskflowbackend/tests/base.py b/taskflowbackend/tests/base.py index e8ca67a0..2fa8baee 100644 --- a/taskflowbackend/tests/base.py +++ b/taskflowbackend/tests/base.py @@ -192,19 +192,30 @@ def clear_irods_test_data(cls): 'TASKFLOW_TEST_MODE not True, cleanup command not allowed' ) irods_backend = get_backend_api('omics_irods') - projects_root = irods_backend.get_projects_path() permanent_users = getattr( settings, 'TASKFLOW_TEST_PERMANENT_USERS', DEFAULT_PERMANENT_USERS ) # TODO: Remove stuff from user home collections with irods_backend.get_session() as irods: - # Remove project folders + # Remove SODAR project and root collections + rm_kwargs = {'recurse': True, 'force': True} try: - irods.collections.remove( - projects_root, recurse=True, force=True - ) - logger.debug('Removed projects root: {}'.format(projects_root)) + if settings.IRODS_ROOT_PATH: + 
sodar_root = '/{}/{}'.format( + settings.IRODS_ZONE, + settings.IRODS_ROOT_PATH.split('/')[0], + ) + irods.collections.remove(sodar_root, **rm_kwargs) + logger.debug( + 'Removed root collection: {}'.format(sodar_root) + ) + else: + projects_root = irods_backend.get_projects_path() + irods.collections.remove(projects_root, **rm_kwargs) + logger.debug( + 'Removed projects root: {}'.format(projects_root) + ) except Exception: pass # This is OK, the root just wasn't there # Remove created user groups and users diff --git a/taskflowbackend/tests/test_flows.py b/taskflowbackend/tests/test_flows.py index 85d57c21..5d8454f4 100644 --- a/taskflowbackend/tests/test_flows.py +++ b/taskflowbackend/tests/test_flows.py @@ -80,6 +80,7 @@ UPDATED_TITLE = 'NewTitle' UPDATED_DESC = 'updated description' SCRIPT_USER_NAME = 'script_user' +IRODS_ROOT_PATH = 'sodar/root' class TaskflowbackendFlowTestBase(TaskflowViewTestBase): @@ -996,6 +997,115 @@ def test_move_restrict(self): ) +@override_settings(IRODS_ROOT_PATH=IRODS_ROOT_PATH) +class TestLandingZoneMoveAltRootPath( + LandingZoneMixin, + LandingZoneTaskflowMixin, + SampleSheetIOMixin, + SampleSheetTaskflowMixin, + TaskflowbackendFlowTestBase, +): + """Tests for the landing_zone_move flow with IRODS_ROOT_PATH set""" + + def setUp(self): + super().setUp() + self.project, self.owner_as = self.make_project_taskflow( + 'NewProject', PROJECT_TYPE_PROJECT, self.category, self.user + ) + self.group_name = self.irods_backend.get_user_group_name(self.project) + self.project_path = self.irods_backend.get_path(self.project) + # Import investigation + self.investigation = self.import_isa_from_file(SHEET_PATH, self.project) + self.study = self.investigation.studies.first() + self.assay = self.study.assays.first() + # Create iRODS collections + self.make_irods_colls(self.investigation) + # Create zone + self.zone = self.make_landing_zone( + title=ZONE_TITLE, + project=self.project, + user=self.user, + assay=self.assay, + description=ZONE_DESC, + status=ZONE_STATUS_CREATING, + ) + self.make_zone_taskflow(self.zone) + self.sample_path = self.irods_backend.get_path(self.assay) + self.zone_path = self.irods_backend.get_path(self.zone) + self.group_name = self.irods_backend.get_user_group_name(self.project) + + def test_move_alt_root_path(self): + """Test landing_zone_move with IRODS_ROOT_PATH set""" + # Assert alt path have been set correctly and returned for all paths + root_path = self.irods_backend.get_root_path() + self.assertEqual( + root_path, '/{}/{}'.format(settings.IRODS_ZONE, IRODS_ROOT_PATH) + ) + self.assertTrue(self.project_path.startswith(root_path)) + self.assertTrue(self.sample_path.startswith(root_path)) + self.assertTrue(self.zone_path.startswith(root_path)) + self.assertEqual(self.irods.collections.exists(self.project_path), True) + self.assertEqual(self.irods.collections.exists(self.sample_path), True) + self.assertEqual(self.irods.collections.exists(self.zone_path), True) + + self.assertEqual(self.zone.status, ZONE_STATUS_ACTIVE) + empty_coll_path = os.path.join(self.zone_path, COLL_NAME) + self.irods.collections.create(empty_coll_path) + obj_coll_path = os.path.join(self.zone_path, OBJ_COLL_NAME) + obj_coll = self.irods.collections.create(obj_coll_path) + obj = self.make_irods_object(obj_coll, OBJ_NAME) + self.make_irods_md5_object(obj) + obj_path = os.path.join(obj_coll_path, OBJ_NAME) + sample_obj_path = os.path.join( + self.sample_path, OBJ_COLL_NAME, OBJ_NAME + ) + + self.assertEqual(self.irods.collections.exists(empty_coll_path), True) + 
self.assertEqual(self.irods.collections.exists(obj_coll_path), True) + self.assertEqual(self.irods.data_objects.exists(obj_path), True) + self.assertEqual( + self.irods.data_objects.exists(obj_path + '.md5'), True + ) + self.assertEqual(self.irods.data_objects.exists(sample_obj_path), False) + self.assertEqual( + self.irods.data_objects.exists(sample_obj_path + '.md5'), + False, + ) + + flow_data = {'zone_uuid': str(self.zone.sodar_uuid)} + flow = self.taskflow.get_flow( + irods_backend=self.irods_backend, + project=self.project, + flow_name='landing_zone_move', + flow_data=flow_data, + ) + self._build_and_run(flow) + + self.zone.refresh_from_db() + self.assertEqual(self.zone.status, ZONE_STATUS_MOVED) + self.assertEqual(self.irods.collections.exists(self.zone_path), False) + sample_empty_path = os.path.join(self.sample_path, COLL_NAME) + # An empty collection should not be created by moving + self.assertEqual( + self.irods.collections.exists(sample_empty_path), False + ) + self.assertEqual(self.irods.data_objects.exists(sample_obj_path), True) + self.assertEqual( + self.irods.data_objects.exists(sample_obj_path + '.md5'), + True, + ) + self.assert_irods_access( + self.group_name, + sample_obj_path, + self.irods_access_read, + ) + self.assert_irods_access( + self.group_name, + sample_obj_path + '.md5', + self.irods_access_read, + ) + + class TestProjectCreate(TaskflowbackendFlowTestBase): """Tests for the project_create flow""" From 4039b0cc98adf31be5ecfacf83bc4d5a95029ffc Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Thu, 8 Feb 2024 10:23:33 +0100 Subject: [PATCH 12/33] upgrade to django v3.2.24 (#1854) --- CHANGELOG.rst | 1 + requirements/base.txt | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 04ac04d1..5b7f6e8e 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -20,6 +20,7 @@ Changed ------- - **General** + - Upgrade to Django v3.2.24 (#1854) - Upgrade critical Vue app dependencies (#1854) - Update installation documentation (#1871) - **Irodsbackend** diff --git a/requirements/base.txt b/requirements/base.txt index e5b08299..04fe2731 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ wheel==0.40.0 setuptools==67.6.0 # Django -django==3.2.23 +django==3.2.24 # Configuration django-environ==0.10.0 From e418362345533bafba7af6a9c5399804ee8bbc54 Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Fri, 9 Feb 2024 09:12:31 +0100 Subject: [PATCH 13/33] update zone failed status handling in run_flow() (#1458) --- CHANGELOG.rst | 1 + taskflowbackend/api.py | 17 +++++++++++++---- 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 5b7f6e8e..7cdbb812 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -32,6 +32,7 @@ Changed - Change ``IrodsDataRequest`` description if created in Ajax view (#1862) - **Taskflowbackend** - Optimize ``landing_zone_move`` iRODS path retrieval (#1882) + - Set zone status on uncaught errors in ``run_flow()`` (#1458) Fixed ----- diff --git a/taskflowbackend/api.py b/taskflowbackend/api.py index 6585cf2c..708ff52a 100644 --- a/taskflowbackend/api.py +++ b/taskflowbackend/api.py @@ -125,7 +125,7 @@ def run_flow( ex_msg = None coordinator = None lock = None - # Get zone if present in flow + # Get landing zone if present in flow zone = None if flow.flow_data.get('zone_uuid'): zone = LandingZone.objects.filter( @@ -185,9 +185,18 @@ def run_flow( # Raise exception if failed, otherwise return result if ex_msg: - logger.error(ex_msg) # TODO: Isn't 
this redundant? - # NOTE: Not providing zone here since it's handled by flow - cls._raise_flow_exception(ex_msg, tl_event, None) + logger.error(ex_msg) + # Provide landing zone if error occurs but status has not been set + # (This means a failure has not been properly handled in the flow) + ex_zone = None + if zone: + zone.refresh_from_db() + if zone.status not in [ + ZONE_STATUS_NOT_CREATED, + ZONE_STATUS_FAILED, + ]: + ex_zone = zone + cls._raise_flow_exception(ex_msg, tl_event, ex_zone) return flow_result def submit( From a3100ca7bee226d59c7ee3877534fc804647c1a0 Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Fri, 9 Feb 2024 11:01:16 +0100 Subject: [PATCH 14/33] fix sheet template creation with slash in id/title field (#1896) --- CHANGELOG.rst | 1 + requirements/base.txt | 4 ++-- samplesheets/tests/test_views.py | 29 +++++++++++++++++++++++++++++ 3 files changed, 32 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 7cdbb812..c2560999 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -49,6 +49,7 @@ Fixed - ``IrodsDataRequest`` user changed on object update (#1864) - ``IrodsDataRequest._validate_action()`` failing with ``delete`` action (#1858) - Protocol ref editable for new row if disabled in column config (#1875) + - Sheet template creation failure with slash characters in title/ID fields (#1896) - **Taskflowbackend** - Hardcoded iRODS path length in ``landing_zone_move`` (#1888) diff --git a/requirements/base.txt b/requirements/base.txt index 04fe2731..5bca5041 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -102,8 +102,8 @@ pronto==2.5.0 # For OWL conversion # CUBI ISA-Tab templates cookiecutter==2.2.3 -cubi-isa-templates==0.1.1 -# -e git+https://github.com/bihealth/cubi-isa-templates.git@c4139e147fd5db10bd6478460edbbce157abe425#egg=cubi-isa-templates +# cubi-isa-templates==0.1.1 +-e git+https://github.com/bihealth/cubi-isa-templates.git@d3ebe866f3823d81fdb358042ef7ed4d8b8fd100#egg=cubi-isa-templates # Taskflow requirements tooz==3.0.0 diff --git a/samplesheets/tests/test_views.py b/samplesheets/tests/test_views.py index e463edeb..65fec757 100644 --- a/samplesheets/tests/test_views.py +++ b/samplesheets/tests/test_views.py @@ -120,6 +120,11 @@ EDIT_NEW_VALUE_STR = 'edited value' DUMMY_UUID = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' IRODS_FILE_PATH = '/sodarZone/path/test1.txt' +TPL_FILE_NAME_FIELDS = [ + 'investigation_title', + 'investigation_id', + 'study_title', +] with open(CONFIG_PATH_DEFAULT) as fp: CONFIG_DATA_DEFAULT = json.load(fp) @@ -796,6 +801,30 @@ def test_post_batch(self): ) self.project.investigations.first().delete() + def test_post_batch_file_name_slash(self): + """Test POST with slashes in values used for file names""" + for t in ISA_TEMPLATES: + self.assertIsNone(self.project.investigations.first()) + post_data = self._get_post_data(t) + for k in TPL_FILE_NAME_FIELDS: + if k in post_data: + post_data[k] += '/test' + with self.login(self.user): + response = self.client.post( + reverse( + 'samplesheets:template_create', + kwargs={'project': self.project.sodar_uuid}, + ) + + '?sheet_tpl=' + + t.name, + data=post_data, + ) + self.assertEqual(response.status_code, 302, msg=t.name) + self.assertIsNotNone( + self.project.investigations.first(), msg=t.name + ) + self.project.investigations.first().delete() + def test_post_multiple(self): """Test multiple requests to add multiple sample sheets (should fail)""" tpl = ISA_TEMPLATES[0] From a0752e2bb60cb4c16dc1b4338926341cdf19f860 Mon Sep 17 00:00:00 2001 From: Mikko Nieminen 
Date: Mon, 12 Feb 2024 17:35:54 +0100 Subject: [PATCH 15/33] upgrade to django-sodar-core v0.13.4-wip (#1899) --- CHANGELOG.rst | 1 + docs_manual/source/sodar_release_notes.rst | 1 + requirements/base.txt | 4 ++-- requirements/ldap.txt | 4 ++-- 4 files changed, 6 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index c2560999..129dcf48 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -21,6 +21,7 @@ Changed - **General** - Upgrade to Django v3.2.24 (#1854) + - Upgrade to django-sodar-core v0.13.4-WIP (#1899) - Upgrade critical Vue app dependencies (#1854) - Update installation documentation (#1871) - **Irodsbackend** diff --git a/docs_manual/source/sodar_release_notes.rst b/docs_manual/source/sodar_release_notes.rst index 107adb81..939f6137 100644 --- a/docs_manual/source/sodar_release_notes.rst +++ b/docs_manual/source/sodar_release_notes.rst @@ -18,6 +18,7 @@ Release for minor updates, maintenance and bug fixes. - Fix IRODS_ROOT_PATH issues - Fix LDAP TLS settings - Minor updates and bug fixes +- Upgrade to SODAR Core v0.13.4 v0.14.1 (2023-12-12) diff --git a/requirements/base.txt b/requirements/base.txt index 5bca5041..68bffe44 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -70,8 +70,8 @@ mistune==2.0.5 django-autocomplete-light==3.9.4 # SODAR Core -django-sodar-core==0.13.3 -# -e git+https://github.com/bihealth/sodar-core.git@11b666cd03c555d0b95c3de547fe312f93263f29#egg=django-sodar-core +# django-sodar-core==0.13.3 +-e git+https://github.com/bihealth/sodar-core.git@1117ff87723389be928538b965fe89576687c1a6#egg=django-sodar-core # Celery celery==5.2.7 diff --git a/requirements/ldap.txt b/requirements/ldap.txt index 5cbb00d9..8960c63d 100644 --- a/requirements/ldap.txt +++ b/requirements/ldap.txt @@ -1,5 +1,5 @@ # Optional LDAP dependencies go here -r base.txt -django-auth-ldap==4.1.0 -python-ldap==3.4.3 +django-auth-ldap>=4.6.0, <4.7 +python-ldap>=3.4.4, <3.5 From 465bd5a933e1a3ce4f11f3682a357f8548263225 Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Mon, 12 Feb 2024 18:01:10 +0100 Subject: [PATCH 16/33] update TASKFLOW_IRODS_CONN_TIMEOUT default value (#1900) --- CHANGELOG.rst | 1 + config/settings/base.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 129dcf48..61cfb8cf 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -34,6 +34,7 @@ Changed - **Taskflowbackend** - Optimize ``landing_zone_move`` iRODS path retrieval (#1882) - Set zone status on uncaught errors in ``run_flow()`` (#1458) + - Change ``TASKFLOW_IRODS_CONN_TIMEOUT`` default value to ``960`` (#1900) Fixed ----- diff --git a/config/settings/base.py b/config/settings/base.py index 0c7bc439..d9df4e87 100644 --- a/config/settings/base.py +++ b/config/settings/base.py @@ -739,7 +739,7 @@ def set_logging(level=None): # Taskflow backend settings # Connection timeout for taskflowbackend flows (other sessions not affected) -TASKFLOW_IRODS_CONN_TIMEOUT = env.int('TASKFLOW_IRODS_CONN_TIMEOUT', 480) +TASKFLOW_IRODS_CONN_TIMEOUT = env.int('TASKFLOW_IRODS_CONN_TIMEOUT', 960) TASKFLOW_LOCK_RETRY_COUNT = env.int('TASKFLOW_LOCK_RETRY_COUNT', 2) TASKFLOW_LOCK_RETRY_INTERVAL = env.int('TASKFLOW_LOCK_RETRY_INTERVAL', 3) TASKFLOW_LOCK_ENABLED = True From e722fd7b76d69d2f5a894080bad483db378574cf Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Tue, 13 Feb 2024 11:29:40 +0100 Subject: [PATCH 17/33] add taskflowbackend settings in siteinfo (#1901) --- CHANGELOG.rst | 2 ++ taskflowbackend/plugins.py | 10 +++++++++- 2 files changed, 11 
insertions(+), 1 deletion(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 61cfb8cf..9016c77f 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -15,6 +15,8 @@ Added - Sanitize and validate ``IRODS_ROOT_PATH`` in ``get_root_path()`` (#1891) - **Landingzones** - Create assay plugin shortcut collections for zones (#1869) +- **Taskflowbackend** + - Django settings in siteinfo (#1901) Changed ------- diff --git a/taskflowbackend/plugins.py b/taskflowbackend/plugins.py index ebf7e671..db0e69f5 100644 --- a/taskflowbackend/plugins.py +++ b/taskflowbackend/plugins.py @@ -34,9 +34,14 @@ # Local constants APP_NAME = 'taskflowbackend' -TL_SUBMIT_DESC = 'Job submitted to Taskflow' IRODS_CAT_SKIP_MSG = 'Categories are not synchronized into iRODS' RANK_FINDER = ROLE_RANKING[PROJECT_ROLE_FINDER] +TASKFLOW_INFO_SETTINGS = [ + 'TASKFLOW_IRODS_CONN_TIMEOUT', + 'TASKFLOW_LOCK_RETRY_COUNT', + 'TASKFLOW_LOCK_RETRY_INTERVAL', +] +TL_SUBMIT_DESC = 'Job submitted to Taskflow' class BackendPlugin(ProjectModifyPluginMixin, BackendPluginPoint): @@ -54,6 +59,9 @@ class BackendPlugin(ProjectModifyPluginMixin, BackendPluginPoint): #: Description string description = 'SODAR Taskflow backend for iRODS data transactions' + #: Names of plugin specific Django settings to display in siteinfo + info_settings = TASKFLOW_INFO_SETTINGS + # Internal helpers --------------------------------------------------------- @classmethod From 9358c8f250c1ff416ec4bd97752190348d7c2648 Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Tue, 13 Feb 2024 13:47:51 +0100 Subject: [PATCH 18/33] add landingzones siteinfo statistics (#1898) --- CHANGELOG.rst | 1 + docs_manual/source/sodar_release_notes.rst | 1 + landingzones/plugins.py | 39 ++++++ landingzones/tests/test_plugins.py | 138 +++++++++++++++++++++ 4 files changed, 179 insertions(+) create mode 100644 landingzones/tests/test_plugins.py diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 9016c77f..a882fd49 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -15,6 +15,7 @@ Added - Sanitize and validate ``IRODS_ROOT_PATH`` in ``get_root_path()`` (#1891) - **Landingzones** - Create assay plugin shortcut collections for zones (#1869) + - Zone statistics for siteinfo (#1898) - **Taskflowbackend** - Django settings in siteinfo (#1901) diff --git a/docs_manual/source/sodar_release_notes.rst b/docs_manual/source/sodar_release_notes.rst index 939f6137..4ea73d0e 100644 --- a/docs_manual/source/sodar_release_notes.rst +++ b/docs_manual/source/sodar_release_notes.rst @@ -14,6 +14,7 @@ v0.14.2 (WIP) Release for minor updates, maintenance and bug fixes. - Add assay plugin shortcut collection creation in landing zones +- Add landing zone statistics in siteinfo - Fix iRODS data request issues - Fix IRODS_ROOT_PATH issues - Fix LDAP TLS settings diff --git a/landingzones/plugins.py b/landingzones/plugins.py index ce8a09e8..9a5d78be 100644 --- a/landingzones/plugins.py +++ b/landingzones/plugins.py @@ -19,6 +19,8 @@ from landingzones.constants import ( STATUS_ALLOW_UPDATE, + STATUS_BUSY, + STATUS_FINISHED, ZONE_STATUS_MOVED, ZONE_STATUS_DELETED, ) @@ -148,6 +150,43 @@ def get_object_link(self, model_str, uuid): 'label': obj.get_display_name(), } + def get_statistics(self): + """ + Return app statistics as a dict. Should take the form of + {id: {label, value, url (optional), description (optional)}}. 
+ + :return: Dict + """ + return { + 'zones_total': { + 'label': 'Total zones', + 'value': LandingZone.objects.count(), + }, + 'zones_active': { + 'label': 'Active zones', + 'value': LandingZone.objects.filter( + status__in=STATUS_ALLOW_UPDATE + ).count(), + 'description': 'Landing zones available for use (active or ' + 'failed)', + }, + 'zones_finished': { + 'label': 'Finished zones', + 'value': LandingZone.objects.filter( + status__in=STATUS_FINISHED + ).count(), + 'description': 'Landing zones finished successfully, deleted ' + 'or not created', + }, + 'zones_busy': { + 'label': 'Busy zones', + 'value': LandingZone.objects.filter( + status__in=STATUS_BUSY + ).count(), + 'description': 'Landing zones with an ongoing transaction', + }, + } + def get_project_list_value(self, column_id, project, user): """ Return a value for the optional additional project list column specific diff --git a/landingzones/tests/test_plugins.py b/landingzones/tests/test_plugins.py new file mode 100644 index 00000000..2af97f2a --- /dev/null +++ b/landingzones/tests/test_plugins.py @@ -0,0 +1,138 @@ +"""Tests for plugins in the landingzones app""" + +from test_plus.test import TestCase + +# Projectroles dependency +from projectroles.models import SODAR_CONSTANTS +from projectroles.plugins import ProjectAppPluginPoint +from projectroles.tests.test_models import ( + ProjectMixin, + RoleMixin, + RoleAssignmentMixin, +) + +# Samplesheets dependency +from samplesheets.tests.test_io import SampleSheetIOMixin, SHEET_DIR + +import landingzones.constants as lc +from landingzones.tests.test_models import LandingZoneMixin + + +# Local constants +SHEET_PATH_SMALL2 = SHEET_DIR + 'i_small2.zip' + + +class LandingzonesPluginTestBase( + ProjectMixin, + RoleMixin, + RoleAssignmentMixin, + SampleSheetIOMixin, + LandingZoneMixin, + TestCase, +): + """Base class for landingzones plugin tests""" + + def setUp(self): + # Init roles + self.init_roles() + # Make owner user + self.user_owner = self.make_user('owner') + # Init project and assignment + self.project = self.make_project( + 'TestProject', SODAR_CONSTANTS['PROJECT_TYPE_PROJECT'], None + ) + self.owner_as = self.make_assignment( + self.project, self.user_owner, self.role_owner + ) + # Import investigation + self.investigation = self.import_isa_from_file( + SHEET_PATH_SMALL2, self.project + ) + self.investigation.irods_status = True + self.investigation.save() + self.study = self.investigation.studies.first() + self.assay = self.study.assays.first() + # Get plugin + self.plugin = ProjectAppPluginPoint.get_plugin('landingzones') + + +class TestGetStatistics(LandingzonesPluginTestBase): + """Tests for get_statistics()""" + + def _make_zone(self, status): + self.make_landing_zone( + 'zone_{}'.format(status.lower()), + self.project, + self.user_owner, + self.assay, + status=status, + ) + + def _assert_stats(self, expected): + stats = self.plugin.get_statistics() + for k, v in expected.items(): + self.assertEqual(stats[k]['value'], v) + + def test_get_statistics_active(self): + """Test get_statistics() with active zones""" + self._make_zone(lc.ZONE_STATUS_ACTIVE) + self._make_zone(lc.ZONE_STATUS_FAILED) + self._assert_stats( + { + 'zones_total': 2, + 'zones_active': 2, + 'zones_finished': 0, + 'zones_busy': 0, + } + ) + + def test_get_statistics_finished(self): + """Test get_statistics() with finished zones""" + self._make_zone(lc.ZONE_STATUS_MOVED) + self._make_zone(lc.ZONE_STATUS_DELETED) + self._assert_stats( + { + 'zones_total': 2, + 'zones_active': 0, + 'zones_finished': 2, + 
'zones_busy': 0, + } + ) + + def test_get_statistics_busy(self): + """Test get_statistics() with busy zones""" + self._make_zone(lc.ZONE_STATUS_MOVING) + self._make_zone(lc.ZONE_STATUS_DELETING) + self._assert_stats( + { + 'zones_total': 2, + 'zones_active': 0, + 'zones_finished': 0, + 'zones_busy': 2, + } + ) + + def test_get_statistics_mixed(self): + """Test get_statistics() with mixed zone statuses""" + self._make_zone(lc.ZONE_STATUS_ACTIVE) + self._make_zone(lc.ZONE_STATUS_MOVING) + self._make_zone(lc.ZONE_STATUS_DELETED) + self._assert_stats( + { + 'zones_total': 3, + 'zones_active': 1, + 'zones_finished': 1, + 'zones_busy': 1, + } + ) + + def test_get_statistics_no_zones(self): + """Test get_statistics() with no zones""" + self._assert_stats( + { + 'zones_total': 0, + 'zones_active': 0, + 'zones_finished': 0, + 'zones_busy': 0, + } + ) From 1deac6a0990a03a82aec65eaa8f37ef89512fd77 Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Wed, 14 Feb 2024 15:02:25 +0100 Subject: [PATCH 19/33] fix and refactor irods stats retrieval (#1327, #1886, #1903) --- CHANGELOG.rst | 2 + docs_manual/source/sodar_release_notes.rst | 1 + .../static/irodsbackend/js/irodsbackend.js | 54 +-- .../templatetags/irodsbackend_tags.py | 13 +- irodsbackend/tests/test_views.py | 414 +----------------- irodsbackend/tests/test_views_taskflow.py | 386 ++++++++++++++++ irodsbackend/views.py | 47 +- 7 files changed, 435 insertions(+), 482 deletions(-) create mode 100644 irodsbackend/tests/test_views_taskflow.py diff --git a/CHANGELOG.rst b/CHANGELOG.rst index a882fd49..69052a98 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -31,6 +31,7 @@ Changed - Reduce redundant object queries (#1883) - Change method logic in ``get_objects()`` and ``get_objs_recursively()`` (#1883) - Use ``get_root_path()`` within ``IrodsAPI`` (#1890) + - Refactor ``IrodsStatisticsAjaxView`` and related JQuery (#1903) - **Samplesheets** - Improve Django messages for ``IrodsDataRequest`` exceptions (#1858) - Change ``IrodsDataRequest`` description if created in Ajax view (#1862) @@ -46,6 +47,7 @@ Fixed - Invalid env var retrieval for ``AUTH_LDAP*_START_TLS`` (#1853) - **Irodsbackend** - Invalid path returned by ``get_path()`` if ``IRODS_ROOT_PATH`` is set (#1889) + - Stats badge stuck in updating with non-200 POST status (#1327, #1886) - **Landingzones** - Stats badge displayed to superusers for ``DELETED`` zones (#1866) - **Samplesheets** diff --git a/docs_manual/source/sodar_release_notes.rst b/docs_manual/source/sodar_release_notes.rst index 4ea73d0e..38a1e751 100644 --- a/docs_manual/source/sodar_release_notes.rst +++ b/docs_manual/source/sodar_release_notes.rst @@ -18,6 +18,7 @@ Release for minor updates, maintenance and bug fixes. 
- Fix iRODS data request issues - Fix IRODS_ROOT_PATH issues - Fix LDAP TLS settings +- Fix iRODS stats badge stuck in "updating" - Minor updates and bug fixes - Upgrade to SODAR Core v0.13.4 diff --git a/irodsbackend/static/irodsbackend/js/irodsbackend.js b/irodsbackend/static/irodsbackend/js/irodsbackend.js index 7a287f7c..f4f9deaa 100644 --- a/irodsbackend/static/irodsbackend/js/irodsbackend.js +++ b/irodsbackend/static/irodsbackend/js/irodsbackend.js @@ -2,54 +2,48 @@ Collection statistics updating function ***************************************/ var updateCollectionStats = function() { - var statsPaths = []; - var projectUUID = null; + var paths = {}; $('span.sodar-irods-stats').each(function () { - var currentPath = decodeURIComponent($(this).attr( - 'data-stats-url').split('=')[1]); - if (!projectUUID) { - projectUUID = currentPath.split('/')[4]; + var projectUUID = $(this).attr('data-project-uuid'); + if (!paths.hasOwnProperty(projectUUID)) { + paths[projectUUID] = []; } - if (!(statsPaths.includes(currentPath))){ - statsPaths.push(currentPath); + var currentPath = $(this).attr('data-stats-path'); + if (!(paths[projectUUID].includes(currentPath))) { + paths[projectUUID].push(currentPath); } }); - var d = {paths: statsPaths}; - - if (projectUUID) { + $.each(paths, function(projectUUID, paths) { $.ajax({ url: '/irodsbackend/ajax/stats/' + projectUUID, method: 'POST', dataType: 'json', - data: d, + data: {paths: paths}, contentType: "application/x-www-form-urlencoded; charset=UTF-8", traditional: true }).done(function (data) { $('span.sodar-irods-stats').each(function () { var statsSpan = $(this); - var irodsPath = decodeURIComponent(statsSpan.attr( - 'data-stats-url')).split('=')[1]; - var fileSuffix = 's'; - for (idx in data['coll_objects']) { - if (data['coll_objects'].hasOwnProperty(idx)) { - if (irodsPath === data['coll_objects'][idx]['path'] - && data['coll_objects'][idx]['status'] === '200') { - var stats = data['coll_objects'][idx]['stats']; - if (stats['file_count'] === 1) { - fileSuffix = ''; - } - statsSpan.text( - stats['file_count'] + ' file' + fileSuffix + - ' ('+ humanFileSize(stats['total_size'], true) + ')'); - break; - } - } + var path = statsSpan.attr('data-stats-path'); + var s = 's'; + if (path in data['irods_stats']) { + var status = data['irods_stats'][path]['status']; + if (status === 200) { + var fileCount = data['irods_stats'][path]['file_count']; + var totalSize = data['irods_stats'][path]['total_size']; + if (fileCount === 1) s = ''; + statsSpan.text( + fileCount + ' file' + s + + ' (' + humanFileSize(totalSize, true) + ')'); + } else if (status === 404) statsSpan.text('Not found'); + else if (status === 403) statsSpan.text('Denied'); + else statsSpan.text('Error'); } }); }); - } + }); }; /*************************************** diff --git a/irodsbackend/templatetags/irodsbackend_tags.py b/irodsbackend/templatetags/irodsbackend_tags.py index 81ce7836..ba95f84f 100644 --- a/irodsbackend/templatetags/irodsbackend_tags.py +++ b/irodsbackend/templatetags/irodsbackend_tags.py @@ -1,6 +1,4 @@ from django import template -from django.urls import reverse -from django.utils.http import urlencode from irodsbackend.api import IrodsAPI @@ -26,18 +24,11 @@ def get_stats_html(irods_path, project): :param project: Project object :return: String (contains HTML) """ - url_kwargs = {'project': str(project.sodar_uuid)} - query_string = {'path': irods_path} - url = ( - reverse('irodsbackend:stats', kwargs=url_kwargs) - + '?' 
- + urlencode(query_string) - ) return ( '' + 'data-stats-path="{}" data-project-uuid="{}">' ' Updating..' - ''.format(url) + ''.format(irods_path, project.sodar_uuid) ) diff --git a/irodsbackend/tests/test_views.py b/irodsbackend/tests/test_views.py index 94576260..175e295e 100644 --- a/irodsbackend/tests/test_views.py +++ b/irodsbackend/tests/test_views.py @@ -1,430 +1,18 @@ """Tests for views in the irodsbackend app""" import base64 -import os -from irods.test.helpers import make_object - -from django.conf import settings -from django.test import RequestFactory, override_settings +from django.test import override_settings from django.urls import reverse from test_plus.test import TestCase -# Projectroles dependency -from projectroles.models import SODAR_CONSTANTS - -# Taskflowbackend dependency -from taskflowbackend.tests.base import TaskflowViewTestBase - - -# SODAR constants -PROJECT_ROLE_OWNER = SODAR_CONSTANTS['PROJECT_ROLE_OWNER'] -PROJECT_ROLE_DELEGATE = SODAR_CONSTANTS['PROJECT_ROLE_DELEGATE'] -PROJECT_ROLE_CONTRIBUTOR = SODAR_CONSTANTS['PROJECT_ROLE_CONTRIBUTOR'] -PROJECT_ROLE_GUEST = SODAR_CONSTANTS['PROJECT_ROLE_GUEST'] -PROJECT_TYPE_CATEGORY = SODAR_CONSTANTS['PROJECT_TYPE_CATEGORY'] -PROJECT_TYPE_PROJECT = SODAR_CONSTANTS['PROJECT_TYPE_PROJECT'] # Local constants -IRODS_TEMP_COLL = 'temp' -IRODS_OBJ_SIZE = 1024 -IRODS_OBJ_CONTENT = ''.join('x' for _ in range(IRODS_OBJ_SIZE)) -IRODS_OBJ_NAME = 'test1.txt' -IRODS_MD5_NAME = 'test1.txt.md5' -IRODS_NON_PROJECT_PATH = ( - '/' + settings.IRODS_ZONE + '/home/' + settings.IRODS_USER -) -IRODS_FAIL_COLL = 'xeiJ1Vie' LOCAL_USER_NAME = 'local_user' LOCAL_USER_PASS = 'password' -class IrodsbackendViewTestBase(TaskflowViewTestBase): - """Base class for irodsbackend UI view testing""" - - def setUp(self): - super().setUp() - self.req_factory = RequestFactory() - # Init project with owner in taskflow - self.project, self.owner_as = self.make_project_taskflow( - 'TestProject', PROJECT_TYPE_PROJECT, self.category, self.user - ) - # Create test collection in iRODS - self.project_path = self.irods_backend.get_path(self.project) - self.irods_path = os.path.join(self.project_path, IRODS_TEMP_COLL) - self.irods_coll = self.irods.collections.create(self.irods_path) - - -class TestIrodsStatisticsAjaxView(IrodsbackendViewTestBase): - """Tests for the landing zone collection statistics Ajax view""" - - def setUp(self): - super().setUp() - self.post_url = self.irods_backend.get_url( - view='stats', project=self.project, method='POST' - ) - - def test_get_empty_coll(self): - """Test GET request for stats on empty iRODS collection""" - with self.login(self.user): - response = self.client.get( - self.irods_backend.get_url( - view='stats', project=self.project, path=self.irods_path - ) - ) - self.assertEqual(response.status_code, 200) - self.assertEqual(response.data['file_count'], 0) - self.assertEqual(response.data['total_size'], 0) - - def test_get_invalid_coll(self): - """Test GET request with invalid collection (should fail)""" - with self.login(self.user): - response = self.client.get( - self.irods_backend.get_url( - view='stats', project=self.project, path=self.irods_path - ) - + '%2F..' 
- ) - self.assertEqual(response.status_code, 400) - - def test_get_coll_obj(self): - """Test GET for stats on collection with data object""" - # Put data object in iRODS - obj_path = self.irods_path + '/' + IRODS_OBJ_NAME - make_object(self.irods, obj_path, IRODS_OBJ_CONTENT) - with self.login(self.user): - response = self.client.get( - self.irods_backend.get_url( - view='stats', project=self.project, path=self.irods_path - ) - ) - self.assertEqual(response.status_code, 200) - self.assertEqual(response.data['file_count'], 1) - self.assertEqual(response.data['total_size'], IRODS_OBJ_SIZE) - - def test_get_coll_md5(self): - """Test GET for stats on collection with data object and md5""" - # Put data object in iRODS - obj_path = self.irods_path + '/' + IRODS_OBJ_NAME - make_object(self.irods, obj_path, IRODS_OBJ_CONTENT) - # Put MD5 data object in iRODS - md5_path = self.irods_path + '/' + IRODS_MD5_NAME - make_object(self.irods, md5_path, IRODS_OBJ_CONTENT) # Not actual md5 - with self.login(self.user): - response = self.client.get( - self.irods_backend.get_url( - view='stats', project=self.project, path=self.irods_path - ) - ) - self.assertEqual(response.status_code, 200) - self.assertEqual(response.data['file_count'], 1) # md5 not counted - self.assertEqual(response.data['total_size'], IRODS_OBJ_SIZE) - - def test_get_coll_not_found(self): - """Test GET for stats on non-existing collection""" - fail_path = self.irods_path + '/' + IRODS_FAIL_COLL - self.assertEqual(self.irods.collections.exists(fail_path), False) - with self.login(self.user): - response = self.client.get( - self.irods_backend.get_url( - view='stats', project=self.project, path=fail_path - ) - ) - self.assertEqual(response.status_code, 404) - - def test_get_coll_not_in_project(self): - """Test GET for stats on collection not belonging to project""" - self.assertEqual( - self.irods.collections.exists(IRODS_NON_PROJECT_PATH), True - ) - with self.login(self.user): - response = self.client.get( - self.irods_backend.get_url( - view='stats', - project=self.project, - path=IRODS_NON_PROJECT_PATH, - ) - ) - self.assertEqual(response.status_code, 400) - - def test_get_no_access(self): - """Test GET for stats with no access for iRODS collection""" - new_user = self.make_user('new_user') - self.make_assignment( - self.project, new_user, self.role_contributor - ) # No taskflow - with self.login(new_user): - response = self.client.get( - self.irods_backend.get_url( - view='stats', project=self.project, path=self.irods_path - ) - ) - self.assertEqual(response.status_code, 403) - - def test_post_empty_coll_stats(self): - """Test POST for batch stats on empty iRODS collections""" - post_data = { - 'paths': [self.irods_path, self.irods_path], - 'md5': ['0', '0'], - } - with self.login(self.user): - response = self.client.post(self.post_url, post_data) - self.assertEqual(response.status_code, 200) - for idx in range(len(response.data['coll_objects'])): - self.assertEqual( - response.data['coll_objects'][idx]['status'], '200' - ) - self.assertEqual( - response.data['coll_objects'][idx]['stats']['file_count'], 0 - ) - self.assertEqual( - response.data['coll_objects'][idx]['stats']['total_size'], 0 - ) - - def test_post_coll_stats(self): - """Test POST for batch stats on collections with data object""" - # Put data object in iRODS - obj_path = self.irods_path + '/' + IRODS_OBJ_NAME - make_object(self.irods, obj_path, IRODS_OBJ_CONTENT) - post_data = { - 'paths': [self.irods_path, self.irods_path], - 'md5': ['0', '0'], - } - with 
self.login(self.user): - response = self.client.post(self.post_url, post_data) - self.assertEqual(response.status_code, 200) - for idx in range(len(response.data['coll_objects'])): - self.assertEqual( - response.data['coll_objects'][idx]['status'], '200' - ) - self.assertEqual( - response.data['coll_objects'][idx]['stats']['file_count'], 1 - ) - self.assertEqual( - response.data['coll_objects'][idx]['stats']['total_size'], - IRODS_OBJ_SIZE, - ) - - def test_post_coll_md5_stats(self): - """Test POST for batch stats on collections with data object and md5""" - # Put data object in iRODS - obj_path = self.irods_path + '/' + IRODS_OBJ_NAME - make_object(self.irods, obj_path, IRODS_OBJ_CONTENT) - # Put MD5 data object in iRODS - md5_path = self.irods_path + '/' + IRODS_MD5_NAME - make_object(self.irods, md5_path, IRODS_OBJ_CONTENT) # Not actual md5 - post_data = { - 'paths': [self.irods_path, self.irods_path], - 'md5': ['1', '1'], - } - with self.login(self.user): - response = self.client.post(self.post_url, post_data) - self.assertEqual(response.status_code, 200) - for idx in range(len(response.data['coll_objects'])): - self.assertEqual( - response.data['coll_objects'][idx]['status'], '200' - ) - self.assertEqual( - response.data['coll_objects'][idx]['stats']['file_count'], 1 - ) - self.assertEqual( - response.data['coll_objects'][idx]['stats']['total_size'], - IRODS_OBJ_SIZE, - ) - - def test_post_coll_not_found(self): - """Test POST for stats on non-existing collections""" - fail_path = self.irods_path + '/' + IRODS_FAIL_COLL - self.assertEqual(self.irods.collections.exists(fail_path), False) - post_data = {'paths': [fail_path, fail_path], 'md5': ['0', '0']} - with self.login(self.user): - response = self.client.post(self.post_url, post_data) - self.assertEqual(response.status_code, 200) - for idx in range(len(response.data['coll_objects'])): - self.assertEqual( - response.data['coll_objects'][idx]['status'], '404' - ) - self.assertEqual(response.data['coll_objects'][idx]['stats'], {}) - - def test_post_coll_not_in_project(self): - """Test POST for stats on collections not belonging to project""" - self.assertEqual( - self.irods.collections.exists(IRODS_NON_PROJECT_PATH), True - ) - post_data = { - 'paths': [IRODS_NON_PROJECT_PATH, IRODS_NON_PROJECT_PATH], - 'md5': ['0', '0'], - } - with self.login(self.user): - response = self.client.post(self.post_url, post_data) - self.assertEqual(response.status_code, 200) - for idx in range(len(response.data['coll_objects'])): - self.assertEqual( - response.data['coll_objects'][idx]['status'], '400' - ) - self.assertEqual(response.data['coll_objects'][idx]['stats'], {}) - - def test_post_no_access(self): - """Test POST for batch stats with no access for collections""" - new_user = self.make_user('new_user') - self.make_assignment( - self.project, new_user, self.role_contributor - ) # No taskflow - post_data = { - 'paths': [self.irods_path, self.irods_path], - 'md5': ['0', '0'], - } - with self.login(new_user): - response = self.client.post(self.post_url, post_data) - self.assertEqual(response.status_code, 200) - for idx in range(len(response.data['coll_objects'])): - self.assertEqual(response.data['coll_objects'][idx]['stats'], {}) - - def test_post_one_empty_coll(self): - """Test POST for batch stats on one (empty) collection""" - post_data = {'paths': [self.irods_path], 'md5': ['0']} - with self.login(self.user): - response = self.client.post(self.post_url, post_data) - self.assertEqual(response.status_code, 200) - for idx in 
range(len(response.data['coll_objects'])): - self.assertEqual( - response.data['coll_objects'][idx]['status'], '200' - ) - self.assertEqual( - response.data['coll_objects'][idx]['stats']['file_count'], 0 - ) - self.assertEqual( - response.data['coll_objects'][idx]['stats']['total_size'], 0 - ) - - -class TestIrodsObjectListAjaxView(IrodsbackendViewTestBase): - """Tests for the landing zone data object listing Ajax view""" - - def test_get_empty_coll(self): - """Test GET for listing an empty collection in iRODS""" - with self.login(self.user): - response = self.client.get( - self.irods_backend.get_url( - view='list', - path=self.irods_path, - project=self.project, - md5=0, - ) - ) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(response.data['irods_data']), 0) - - def test_get_coll_obj(self): - """Test GET for listing a collection with a data object""" - # Put data object in iRODS - obj_path = self.irods_path + '/' + IRODS_OBJ_NAME - data_obj = make_object(self.irods, obj_path, IRODS_OBJ_CONTENT) - - with self.login(self.user): - response = self.client.get( - self.irods_backend.get_url( - view='list', - path=self.irods_path, - project=self.project, - md5=0, - ) - ) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(response.data['irods_data']), 1) - - list_obj = response.data['irods_data'][0] - self.assertNotIn('md5_file', list_obj) - self.assertEqual(data_obj.name, list_obj['name']) - self.assertEqual(data_obj.path, list_obj['path']) - self.assertEqual(data_obj.size, IRODS_OBJ_SIZE) - - def test_get_coll_md5(self): - """Test GET for listing a collection with a data object and md5""" - obj_path = self.irods_path + '/' + IRODS_OBJ_NAME - make_object(self.irods, obj_path, IRODS_OBJ_CONTENT) - # Put MD5 data object in iRODS - md5_path = self.irods_path + '/' + IRODS_MD5_NAME - make_object(self.irods, md5_path, IRODS_OBJ_CONTENT) # Not actual md5 - - with self.login(self.user): - response = self.client.get( - self.irods_backend.get_url( - view='list', - path=self.irods_path, - project=self.project, - md5=1, - ) - ) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(response.data['irods_data']), 1) # Still 1 - self.assertEqual(response.data['irods_data'][0]['md5_file'], True) - - def test_get_coll_md5_no_file(self): - """Test GET with md5 set True but no md5 file""" - obj_path = self.irods_path + '/' + IRODS_OBJ_NAME - make_object(self.irods, obj_path, IRODS_OBJ_CONTENT) - - with self.login(self.user): - response = self.client.get( - self.irods_backend.get_url( - view='list', - path=self.irods_path, - project=self.project, - md5=1, - ) - ) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(response.data['irods_data']), 1) - self.assertEqual(response.data['irods_data'][0]['md5_file'], False) - - def test_get_coll_not_found(self): - """Test GET for listing a collection which doesn't exist""" - fail_path = self.irods_path + '/' + IRODS_FAIL_COLL - self.assertEqual(self.irods.collections.exists(fail_path), False) - with self.login(self.user): - response = self.client.get( - self.irods_backend.get_url( - view='list', path=fail_path, project=self.project, md5=0 - ) - ) - self.assertEqual(response.status_code, 404) - - def test_get_coll_not_in_project(self): - """Test GET for listing a collection not belonging to project""" - self.assertEqual( - self.irods.collections.exists(IRODS_NON_PROJECT_PATH), True - ) - with self.login(self.user): - response = self.client.get( - self.irods_backend.get_url( - view='list', - 
path=IRODS_NON_PROJECT_PATH, - project=self.project, - md5=0, - ) - ) - self.assertEqual(response.status_code, 400) - - def test_get_no_access(self): - """Test GET for listing with no acces for the iRODS folder""" - new_user = self.make_user('new_user') - self.make_assignment( - self.project, new_user, self.role_contributor - ) # No taskflow - with self.login(new_user): - response = self.client.get( - self.irods_backend.get_url( - view='list', - path=self.irods_path, - project=self.project, - md5=0, - ) - ) - self.assertEqual(response.status_code, 403) - - class TestLocalAuthAPIView(TestCase): """Tests for LocalAuthAPIView""" diff --git a/irodsbackend/tests/test_views_taskflow.py b/irodsbackend/tests/test_views_taskflow.py new file mode 100644 index 00000000..af65ed63 --- /dev/null +++ b/irodsbackend/tests/test_views_taskflow.py @@ -0,0 +1,386 @@ +"""Tests for views in the irodsbackend app with taskflow enabled""" + +import os + +from irods.test.helpers import make_object + +from django.conf import settings +from django.test import RequestFactory + +# Projectroles dependency +from projectroles.models import SODAR_CONSTANTS + +# Taskflowbackend dependency +from taskflowbackend.tests.base import TaskflowViewTestBase + + +# SODAR constants +PROJECT_ROLE_OWNER = SODAR_CONSTANTS['PROJECT_ROLE_OWNER'] +PROJECT_ROLE_DELEGATE = SODAR_CONSTANTS['PROJECT_ROLE_DELEGATE'] +PROJECT_ROLE_CONTRIBUTOR = SODAR_CONSTANTS['PROJECT_ROLE_CONTRIBUTOR'] +PROJECT_ROLE_GUEST = SODAR_CONSTANTS['PROJECT_ROLE_GUEST'] +PROJECT_TYPE_CATEGORY = SODAR_CONSTANTS['PROJECT_TYPE_CATEGORY'] +PROJECT_TYPE_PROJECT = SODAR_CONSTANTS['PROJECT_TYPE_PROJECT'] + +# Local constants +IRODS_TEMP_COLL = 'temp' +IRODS_TEMP_COLL2 = 'temp2' +IRODS_OBJ_SIZE = 1024 +IRODS_OBJ_CONTENT = ''.join('x' for _ in range(IRODS_OBJ_SIZE)) +IRODS_OBJ_NAME = 'test1.txt' +IRODS_MD5_NAME = 'test1.txt.md5' +IRODS_NON_PROJECT_PATH = ( + '/' + settings.IRODS_ZONE + '/home/' + settings.IRODS_USER +) +IRODS_FAIL_COLL = 'xeiJ1Vie' + + +class IrodsbackendViewTestBase(TaskflowViewTestBase): + """Base class for irodsbackend UI view testing""" + + def setUp(self): + super().setUp() + self.req_factory = RequestFactory() + # Init project with owner in taskflow + self.project, self.owner_as = self.make_project_taskflow( + 'TestProject', PROJECT_TYPE_PROJECT, self.category, self.user + ) + # Create test collection in iRODS + # NOTE: Not fully valid sample paths, needs make_irods_colls() for that + self.project_path = self.irods_backend.get_path(self.project) + self.irods_path = os.path.join(self.project_path, IRODS_TEMP_COLL) + self.irods_coll = self.irods.collections.create(self.irods_path) + + +class TestIrodsStatisticsAjaxView(IrodsbackendViewTestBase): + """Tests for the landing zone collection statistics Ajax view""" + + def setUp(self): + super().setUp() + self.post_url = self.irods_backend.get_url( + view='stats', project=self.project, method='POST' + ) + + def test_get_empty_coll(self): + """Test GET request for stats on empty iRODS collection""" + with self.login(self.user): + response = self.client.get( + self.irods_backend.get_url( + view='stats', project=self.project, path=self.irods_path + ) + ) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.data['file_count'], 0) + self.assertEqual(response.data['total_size'], 0) + + def test_get_invalid_coll(self): + """Test GET request with invalid collection (should fail)""" + with self.login(self.user): + response = self.client.get( + self.irods_backend.get_url( + view='stats', 
project=self.project, path=self.irods_path + ) + + '%2F..' + ) + self.assertEqual(response.status_code, 400) + + def test_get_coll_obj(self): + """Test GET for stats on collection with data object""" + # Put data object in iRODS + obj_path = self.irods_path + '/' + IRODS_OBJ_NAME + make_object(self.irods, obj_path, IRODS_OBJ_CONTENT) + with self.login(self.user): + response = self.client.get( + self.irods_backend.get_url( + view='stats', project=self.project, path=self.irods_path + ) + ) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.data['file_count'], 1) + self.assertEqual(response.data['total_size'], IRODS_OBJ_SIZE) + + def test_get_coll_md5(self): + """Test GET for stats on collection with data object and md5""" + # Put data object in iRODS + obj_path = self.irods_path + '/' + IRODS_OBJ_NAME + make_object(self.irods, obj_path, IRODS_OBJ_CONTENT) + # Put MD5 data object in iRODS + md5_path = self.irods_path + '/' + IRODS_MD5_NAME + make_object(self.irods, md5_path, IRODS_OBJ_CONTENT) # Not actual md5 + with self.login(self.user): + response = self.client.get( + self.irods_backend.get_url( + view='stats', project=self.project, path=self.irods_path + ) + ) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.data['file_count'], 1) # md5 not counted + self.assertEqual(response.data['total_size'], IRODS_OBJ_SIZE) + + def test_get_coll_not_found(self): + """Test GET for stats on non-existing collection""" + fail_path = self.irods_path + '/' + IRODS_FAIL_COLL + self.assertEqual(self.irods.collections.exists(fail_path), False) + with self.login(self.user): + response = self.client.get( + self.irods_backend.get_url( + view='stats', project=self.project, path=fail_path + ) + ) + self.assertEqual(response.status_code, 404) + + def test_get_coll_not_in_project(self): + """Test GET for stats on collection not belonging to project""" + self.assertEqual( + self.irods.collections.exists(IRODS_NON_PROJECT_PATH), True + ) + with self.login(self.user): + response = self.client.get( + self.irods_backend.get_url( + view='stats', + project=self.project, + path=IRODS_NON_PROJECT_PATH, + ) + ) + self.assertEqual(response.status_code, 400) + + def test_get_no_access(self): + """Test GET for stats with no access for iRODS collection""" + new_user = self.make_user('new_user') + self.make_assignment( + self.project, new_user, self.role_contributor + ) # No taskflow + with self.login(new_user): + response = self.client.get( + self.irods_backend.get_url( + view='stats', project=self.project, path=self.irods_path + ) + ) + self.assertEqual(response.status_code, 403) + + def test_post_empty_coll(self): + """Test POST on empty iRODS collection""" + post_data = {'paths': [self.irods_path]} + with self.login(self.user): + response = self.client.post(self.post_url, post_data) + self.assertEqual(response.status_code, 200) + response_data = response.data['irods_stats'] + self.assertEqual(len(response_data.values()), 1) + expected = { + self.irods_path: {'status': 200, 'file_count': 0, 'total_size': 0} + } + self.assertEqual(response_data, expected) + + def test_post_non_empty_coll(self): + """Test POST with data object in collection""" + obj_path = os.path.join(self.irods_path, IRODS_OBJ_NAME) + make_object(self.irods, obj_path, IRODS_OBJ_CONTENT) + post_data = {'paths': [self.irods_path]} + with self.login(self.user): + response = self.client.post(self.post_url, post_data) + expected = { + self.irods_path: { + 'status': 200, + 'file_count': 1, + 'total_size': IRODS_OBJ_SIZE, 
+ } + } + self.assertEqual(response.data['irods_stats'], expected) + + def test_post_md5_file(self): + """Test POST with .md5 file in collection""" + obj_path = os.path.join(self.irods_path, IRODS_OBJ_NAME) + obj = make_object(self.irods, obj_path, IRODS_OBJ_CONTENT) + self.make_irods_md5_object(obj) + self.assert_irods_obj( + os.path.join(self.irods_path, IRODS_OBJ_NAME + '.md5') + ) + post_data = {'paths': [self.irods_path]} + with self.login(self.user): + response = self.client.post(self.post_url, post_data) + response_data = response.data['irods_stats'] + self.assertEqual(len(response_data.values()), 1) # md5 not included + expected = { + self.irods_path: { + 'status': 200, + 'file_count': 1, + 'total_size': IRODS_OBJ_SIZE, # md5 file size not included + } + } + self.assertEqual(response_data, expected) + + def test_post_multiple_paths(self): + """Test POST with multiple paths""" + irods_path_new = os.path.join(self.project_path, IRODS_TEMP_COLL2) + self.irods.collections.create(irods_path_new) + obj_path = os.path.join(self.irods_path, IRODS_OBJ_NAME) + make_object(self.irods, obj_path, IRODS_OBJ_CONTENT) + post_data = {'paths': [self.irods_path, irods_path_new]} + with self.login(self.user): + response = self.client.post(self.post_url, post_data) + expected = { + self.irods_path: { + 'status': 200, + 'file_count': 1, + 'total_size': IRODS_OBJ_SIZE, + }, + irods_path_new: {'status': 200, 'file_count': 0, 'total_size': 0}, + } + self.assertEqual(response.data['irods_stats'], expected) + + def test_post_dupe_path(self): + """Test POST with duplicate path""" + # This should not happen in the UI, but testing in case of view abuse + post_data = {'paths': [self.irods_path, self.irods_path]} + with self.login(self.user): + response = self.client.post(self.post_url, post_data) + # We should only get one result + self.assertEqual(len(response.data['irods_stats'].values()), 1) + + def test_post_coll_not_found(self): + """Test POST for stats on non-existing collections""" + fail_path = os.path.join(self.irods_path, IRODS_FAIL_COLL) + self.assertEqual(self.irods.collections.exists(fail_path), False) + post_data = {'paths': [fail_path]} + with self.login(self.user): + response = self.client.post(self.post_url, post_data) + self.assertEqual(response.status_code, 200) + expected = {fail_path: {'status': 404}} + self.assertEqual(response.data['irods_stats'], expected) + + def test_post_non_project_path(self): + """Test POST with path not belonging to project""" + self.assertEqual( + self.irods.collections.exists(IRODS_NON_PROJECT_PATH), True + ) + post_data = {'paths': [IRODS_NON_PROJECT_PATH]} + with self.login(self.user): + response = self.client.post(self.post_url, post_data) + self.assertEqual(response.status_code, 200) + expected = {IRODS_NON_PROJECT_PATH: {'status': 400}} + self.assertEqual(response.data['irods_stats'], expected) + + +class TestIrodsObjectListAjaxView(IrodsbackendViewTestBase): + """Tests for IrodsObjectListAjaxView""" + + def test_get_empty_coll(self): + """Test IrodsObjectListAjaxView GET with empty collection""" + with self.login(self.user): + response = self.client.get( + self.irods_backend.get_url( + view='list', + path=self.irods_path, + project=self.project, + md5=0, + ) + ) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data['irods_data']), 0) + + def test_get_data_obj(self): + """Test GET with data object""" + # Put data object in iRODS + obj_path = self.irods_path + '/' + IRODS_OBJ_NAME + data_obj = make_object(self.irods, obj_path, 
IRODS_OBJ_CONTENT) + + with self.login(self.user): + response = self.client.get( + self.irods_backend.get_url( + view='list', + path=self.irods_path, + project=self.project, + md5=0, + ) + ) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data['irods_data']), 1) + + list_obj = response.data['irods_data'][0] + self.assertNotIn('md5_file', list_obj) + self.assertEqual(data_obj.name, list_obj['name']) + self.assertEqual(data_obj.path, list_obj['path']) + self.assertEqual(data_obj.size, IRODS_OBJ_SIZE) + + def test_get_md5_file(self): + """Test GET with data object and md5 file""" + obj_path = self.irods_path + '/' + IRODS_OBJ_NAME + obj = make_object(self.irods, obj_path, IRODS_OBJ_CONTENT) + # Create MD5 data object in iRODS + self.make_irods_md5_object(obj) + + with self.login(self.user): + response = self.client.get( + self.irods_backend.get_url( + view='list', + path=self.irods_path, + project=self.project, + md5=1, + ) + ) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data['irods_data']), 1) # Still 1 + self.assertEqual(response.data['irods_data'][0]['md5_file'], True) + + def test_get_md5_no_file(self): + """Test GET with md5 set True and no md5 file""" + obj_path = self.irods_path + '/' + IRODS_OBJ_NAME + make_object(self.irods, obj_path, IRODS_OBJ_CONTENT) + + with self.login(self.user): + response = self.client.get( + self.irods_backend.get_url( + view='list', + path=self.irods_path, + project=self.project, + md5=1, + ) + ) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data['irods_data']), 1) + self.assertEqual(response.data['irods_data'][0]['md5_file'], False) + + def test_get_coll_not_found(self): + """Test GET with non-existing collection""" + fail_path = self.irods_path + '/' + IRODS_FAIL_COLL + self.assertEqual(self.irods.collections.exists(fail_path), False) + with self.login(self.user): + response = self.client.get( + self.irods_backend.get_url( + view='list', path=fail_path, project=self.project, md5=0 + ) + ) + self.assertEqual(response.status_code, 404) + + def test_get_coll_not_in_project(self): + """Test GET with collection not in project""" + self.assertEqual( + self.irods.collections.exists(IRODS_NON_PROJECT_PATH), True + ) + with self.login(self.user): + response = self.client.get( + self.irods_backend.get_url( + view='list', + path=IRODS_NON_PROJECT_PATH, + project=self.project, + md5=0, + ) + ) + self.assertEqual(response.status_code, 400) + + def test_get_no_access(self): + """Test GET with no access to collection""" + new_user = self.make_user('new_user') + self.make_assignment( + self.project, new_user, self.role_contributor + ) # No taskflow + with self.login(new_user): + response = self.client.get( + self.irods_backend.get_url( + view='list', + path=self.irods_path, + project=self.project, + md5=0, + ) + ) + self.assertEqual(response.status_code, 403) diff --git a/irodsbackend/views.py b/irodsbackend/views.py index cc5f877b..c2df8c3c 100644 --- a/irodsbackend/views.py +++ b/irodsbackend/views.py @@ -148,40 +148,31 @@ def get(self, *args, **kwargs): return Response(self._get_detail(ex), status=500) def post(self, request, *args, **kwargs): - data = {'coll_objects': []} - q_dict = request.POST + ret = {} project_path = self.irods_backend.get_path(self.project) try: irods = self.irods_backend.get_session_obj() except Exception as ex: return JsonResponse(self._get_detail(ex), status=500) - for path in q_dict.getlist('paths'): - if project_path not in path: - 
data['coll_objects'].append( - {'path': path, 'status': '400', 'stats': {}} - ) - break - if not self._check_collection_perm(path, request.user, irods): - data['coll_objects'].append( - {'path': path, 'status': '403', 'stats': {}} - ) - break - try: - if not irods.collections.exists(path): - data['coll_objects'].append( - {'path': path, 'status': '404', 'stats': {}} - ) - else: - ret_data = self.irods_backend.get_object_stats(irods, path) - data['coll_objects'].append( - {'path': path, 'status': '200', 'stats': ret_data} - ) - except Exception: - data['coll_objects'].append( - {'path': path, 'status': '500', 'stats': {}} - ) + for p in request.POST.getlist('paths'): + d = {} + if not p.startswith(project_path): + d['status'] = 400 + elif not self._check_collection_perm(p, request.user, irods): + d['status'] = 403 + else: + try: + if irods.collections.exists(p): + stats = self.irods_backend.get_object_stats(irods, p) + d.update(stats) + d['status'] = 200 + else: + d['status'] = 404 + except Exception: + d['status'] = 500 + ret[p] = d irods.cleanup() - return Response(data, status=200) + return Response({'irods_stats': ret}, status=200) class IrodsObjectListAjaxView(BaseIrodsAjaxView): From c495393cc32dd15667941c4710032ab08d96e760 Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Wed, 14 Feb 2024 17:44:16 +0100 Subject: [PATCH 20/33] fix landing zone status update in project detail view (#1902) --- CHANGELOG.rst | 2 + docs_manual/source/sodar_release_notes.rst | 1 + .../static/landingzones/js/landingzones.js | 10 +- .../templates/landingzones/_details_card.html | 7 +- .../templates/landingzones/project_zones.html | 6 +- landingzones/tests/test_ui.py | 156 +++++++++++++++++- 6 files changed, 163 insertions(+), 19 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 69052a98..c35f9124 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -16,6 +16,7 @@ Added - **Landingzones** - Create assay plugin shortcut collections for zones (#1869) - Zone statistics for siteinfo (#1898) + - UI tests for project details card (#1902) - **Taskflowbackend** - Django settings in siteinfo (#1901) @@ -50,6 +51,7 @@ Fixed - Stats badge stuck in updating with non-200 POST status (#1327, #1886) - **Landingzones** - Stats badge displayed to superusers for ``DELETED`` zones (#1866) + - Zone status updating not working in project details card (#1902) - **Samplesheets** - Invalid WebDAV URLs generated in ``IrodsDataRequestListView`` (#1860) - Superuser not allowed to edit iRODS request from other users in UI (#1863) diff --git a/docs_manual/source/sodar_release_notes.rst b/docs_manual/source/sodar_release_notes.rst index 38a1e751..349c0e97 100644 --- a/docs_manual/source/sodar_release_notes.rst +++ b/docs_manual/source/sodar_release_notes.rst @@ -19,6 +19,7 @@ Release for minor updates, maintenance and bug fixes. 
- Fix IRODS_ROOT_PATH issues - Fix LDAP TLS settings - Fix iRODS stats badge stuck in "updating" +- Fix landing zone status updating not working in project details card - Minor updates and bug fixes - Upgrade to SODAR Core v0.13.4 diff --git a/landingzones/static/landingzones/js/landingzones.js b/landingzones/static/landingzones/js/landingzones.js index 299329f2..baa57463 100644 --- a/landingzones/static/landingzones/js/landingzones.js +++ b/landingzones/static/landingzones/js/landingzones.js @@ -20,12 +20,10 @@ var updateZoneStatus = function() { // Make the POST request to retrieve zone statuses if (zoneUuids.length > 0) { $.ajax({ - url: zoneStatusUrl, + url: zoneStatusURL, method: 'POST', dataType: 'JSON', - data: { - zone_uuids: zoneUuids - } + data: {zone_uuids: zoneUuids} }).done(function(data) { $('.sodar-lz-zone-tr-existing').each(function() { var zoneUuid = $(this).attr('data-zone-uuid'); @@ -67,9 +65,7 @@ var updateZoneStatus = function() { 'span#sodar-lz-zone-status-moved-' + zoneUuid ); statusMovedSpan.html( - '

' + +                '
' + ' ' + 'Browse files in sample sheet
' ); diff --git a/landingzones/templates/landingzones/_details_card.html b/landingzones/templates/landingzones/_details_card.html index c53dd9ca..99d2e95a 100644 --- a/landingzones/templates/landingzones/_details_card.html +++ b/landingzones/templates/landingzones/_details_card.html @@ -17,16 +17,13 @@ .sodar-lz-detail-table td:first-child { border-left: 0; } - .sodar-lz-detail-table thead tr th:nth-child(1) { width: 250px; } - .sodar-lz-detail-table thead tr th:nth-child(3) { white-space: nowrap; width: 150px; } - /* Responsive modifications */ @media screen and (max-width: 1000px) { .table.sodar-lz-detail-table tr th:nth-child(2) { @@ -41,6 +38,8 @@ @@ -61,7 +60,7 @@ {% include 'landingzones/_zone_item.html' with item=zone details_card_mode=True %} {% endfor %} {% else %} - + No active landing zones found. diff --git a/landingzones/templates/landingzones/project_zones.html b/landingzones/templates/landingzones/project_zones.html index 833864e3..6a0110f5 100644 --- a/landingzones/templates/landingzones/project_zones.html +++ b/landingzones/templates/landingzones/project_zones.html @@ -56,13 +56,11 @@ display: none; } } - @media screen and (max-width: 1200px) { .table.sodar-lz-table thead tr th:nth-child(1) { width: 250px; } } - @media screen and (max-width: 900px) { .table.sodar-lz-table thead tr th:nth-child(2) { display: none; @@ -71,7 +69,6 @@ display: none; } } - @media screen and (max-width: 450px) { .table.sodar-lz-table thead tr th:nth-child(4) { display: none; @@ -157,8 +154,7 @@

Landing Zones

window.zoneStatusUpdated = false; window.statusInterval = {{ zone_status_interval }} * 1000; window.irodsShowChecksumCol = true; - var zoneStatusUrl = "{% url 'landingzones:ajax_status' project=project.sodar_uuid %}"; - + var zoneStatusURL = "{% url 'landingzones:ajax_status' project=project.sodar_uuid %}"; var currentUserURL = "{% url 'projectroles:ajax_user_current' %}"; diff --git a/landingzones/tests/test_ui.py b/landingzones/tests/test_ui.py index 63b02fe3..56c6a038 100644 --- a/landingzones/tests/test_ui.py +++ b/landingzones/tests/test_ui.py @@ -16,7 +16,11 @@ from samplesheets.tests.test_io import SampleSheetIOMixin, SHEET_DIR from samplesheets.tests.test_sheet_config import SheetConfigMixin -from landingzones.constants import ZONE_STATUS_CREATING +from landingzones.constants import ( + ZONE_STATUS_CREATING, + ZONE_STATUS_ACTIVE, + ZONE_STATUS_VALIDATING, +) from landingzones.tests.test_models import LandingZoneMixin @@ -27,10 +31,10 @@ SHEET_PATH = SHEET_DIR + 'i_small.zip' -class TestProjectZoneView( +class LandingZoneUITestBase( SampleSheetIOMixin, SheetConfigMixin, LandingZoneMixin, TestUIBase ): - """ProjectZoneView UI tests""" + """Base class for landingzones UI tests""" investigation = None study = None @@ -57,6 +61,14 @@ def _assert_btn_enabled(self, element, expected=True): else: self.assertIn('disabled', element.get_attribute('class')) + def _wait_for_status(self, status_elem, status): + """Wait for a specific status in the zone status element""" + for i in range(0, 25): + if status_elem.text == status: + return + time.sleep(1.0) + raise Exception('Status not changed') + def _wait_for_status_update(self): """Wait for JQuery landing zone status updates to finish""" for i in range(0, 20): @@ -75,6 +87,13 @@ def setUp(self): self.user_delegate, self.user_contributor, ] + + +class TestProjectZoneView(LandingZoneUITestBase): + """UI tests for ProjectZoneView""" + + def setUp(self): + super().setUp() self.url = reverse( 'landingzones:list', kwargs={'project': self.project.sodar_uuid}, @@ -285,6 +304,27 @@ def test_render_other_user_no_access(self): self.assertEqual(len(zones), 2) self._assert_element(By.CLASS_NAME, 'sodar-lz-zone-warn-access', True) + def test_status_update(self): + """Test ProjectZoneView with zone status update""" + self._setup_investigation() + self.investigation.irods_status = True + self.investigation.save() + contrib_zone = self.make_landing_zone( + 'contrib_zone', + self.project, + self.user_contributor, + self.assay, + status='ACTIVE', + ) + self.login_and_redirect(self.user_contributor, self.url) + zone_status = self.selenium.find_element( + By.CLASS_NAME, 'sodar-lz-zone-status' + ) + self.assertEqual(zone_status.text, ZONE_STATUS_ACTIVE) + contrib_zone.set_status(ZONE_STATUS_VALIDATING) + self._wait_for_status(zone_status, ZONE_STATUS_VALIDATING) + self.assertEqual(zone_status.text, ZONE_STATUS_VALIDATING) + def test_zone_buttons(self): """Test ProjectZoneView zone buttons""" self._setup_investigation() @@ -420,3 +460,113 @@ def test_zone_locked_contributor(self): By.CLASS_NAME, 'sodar-lz-zone-status-info' ).get_attribute('class'), ) + + +class TestProjectDetailView(LandingZoneUITestBase): + """UI tests for ProjectDetailView""" + + def setUp(self): + super().setUp() + self.url = reverse( + 'projectroles:detail', + kwargs={'project': self.project.sodar_uuid}, + ) + + def test_render_no_zones(self): + """Test ProjectDetailView with no zones""" + self._setup_investigation() + self.investigation.irods_status = True + self.investigation.save() + 
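+        # Owner logs in; only the "no zones" placeholder row should be rendered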
self.login_and_redirect(self.user_owner, self.url) + self._assert_element(By.ID, 'sodar-lz-detail-table-no-zones', True) + self._assert_element(By.CLASS_NAME, 'sodar-lz-zone-tr-existing', False) + + def test_render_own_zone(self): + """Test ProjectDetailView as contributor with own zone""" + self._setup_investigation() + self.investigation.irods_status = True + self.investigation.save() + contrib_zone = self.make_landing_zone( + 'contrib_zone', self.project, self.user_contributor, self.assay + ) + self.login_and_redirect(self.user_contributor, self.url) + self._assert_element(By.ID, 'sodar-lz-detail-table-no-zones', False) + zones = self.selenium.find_elements( + By.CLASS_NAME, 'sodar-lz-zone-tr-existing' + ) + self.assertEqual(len(zones), 1) + self.assertEqual( + zones[0].get_attribute('data-zone-uuid'), + str(contrib_zone.sodar_uuid), + ) + + def test_render_multiple_zones(self): + """Test ProjectDetailView as contributor with multiple own zones""" + self._setup_investigation() + self.investigation.irods_status = True + self.investigation.save() + self.make_landing_zone( + 'contrib_zone', self.project, self.user_contributor, self.assay + ) + self.make_landing_zone( + 'contrib_zone2', self.project, self.user_contributor, self.assay + ) + self.login_and_redirect(self.user_contributor, self.url) + zones = self.selenium.find_elements( + By.CLASS_NAME, 'sodar-lz-zone-tr-existing' + ) + self.assertEqual(len(zones), 2) + + def test_render_other_zone(self): + """Test ProjectDetailView as contributor with other user's zone""" + self._setup_investigation() + self.investigation.irods_status = True + self.investigation.save() + self.make_landing_zone( + 'owner_zone', self.project, self.user_owner, self.assay + ) + self.login_and_redirect(self.user_contributor, self.url) + self._assert_element(By.ID, 'sodar-lz-detail-table-no-zones', True) + self._assert_element(By.CLASS_NAME, 'sodar-lz-zone-tr-existing', False) + + def test_render_as_owner(self): + """Test ProjectDetailView as owner with own and other zones""" + self._setup_investigation() + self.investigation.irods_status = True + self.investigation.save() + owner_zone = self.make_landing_zone( + 'owner_zone', self.project, self.user_owner, self.assay + ) + self.make_landing_zone( + 'contrib_zone', self.project, self.user_contributor, self.assay + ) + self.login_and_redirect(self.user_owner, self.url) + zones = self.selenium.find_elements( + By.CLASS_NAME, 'sodar-lz-zone-tr-existing' + ) + self.assertEqual(len(zones), 1) # Only own zone should be visible + self.assertEqual( + zones[0].get_attribute('data-zone-uuid'), + str(owner_zone.sodar_uuid), + ) + + def test_update_status(self): + """Test ProjectDetailView with zone status update""" + self._setup_investigation() + self.investigation.irods_status = True + self.investigation.save() + contrib_zone = self.make_landing_zone( + 'contrib_zone', + self.project, + self.user_contributor, + self.assay, + status='ACTIVE', + ) + self.login_and_redirect(self.user_contributor, self.url) + zone_status = self.selenium.find_element( + By.CLASS_NAME, 'sodar-lz-zone-status' + ) + self.assertEqual(zone_status.text, ZONE_STATUS_ACTIVE) + contrib_zone.set_status(ZONE_STATUS_VALIDATING) + self._wait_for_status(zone_status, ZONE_STATUS_VALIDATING) + self.assertEqual(zone_status.text, ZONE_STATUS_VALIDATING) From c8f59a717aec099b5a5a2e4557b75ab7d803dd3f Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Fri, 16 Feb 2024 15:54:44 +0100 Subject: [PATCH 21/33] upgrade to django-sodar-core v0.13.4 final (#1899) --- 
CHANGELOG.rst | 2 +- requirements/base.txt | 4 ++-- samplesheets/tests/test_views_taskflow.py | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index c35f9124..5055022b 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -25,7 +25,7 @@ Changed - **General** - Upgrade to Django v3.2.24 (#1854) - - Upgrade to django-sodar-core v0.13.4-WIP (#1899) + - Upgrade to django-sodar-core v0.13.4 (#1899) - Upgrade critical Vue app dependencies (#1854) - Update installation documentation (#1871) - **Irodsbackend** diff --git a/requirements/base.txt b/requirements/base.txt index 68bffe44..97b2b90e 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -70,8 +70,8 @@ mistune==2.0.5 django-autocomplete-light==3.9.4 # SODAR Core -# django-sodar-core==0.13.3 --e git+https://github.com/bihealth/sodar-core.git@1117ff87723389be928538b965fe89576687c1a6#egg=django-sodar-core +django-sodar-core==0.13.4 +# -e git+https://github.com/bihealth/sodar-core.git@be012e5536bacf8bfbfe95e3c930324edae0309b#egg=django-sodar-core # Celery celery==5.2.7 diff --git a/samplesheets/tests/test_views_taskflow.py b/samplesheets/tests/test_views_taskflow.py index 260798ec..70957895 100644 --- a/samplesheets/tests/test_views_taskflow.py +++ b/samplesheets/tests/test_views_taskflow.py @@ -20,6 +20,7 @@ from projectroles.app_settings import AppSettingAPI from projectroles.models import SODAR_CONSTANTS from projectroles.plugins import get_backend_api +from projectroles.views import MSG_NO_AUTH # Appalerts dependency from appalerts.models import AppAlert @@ -2248,8 +2249,7 @@ def test_reject_contributor(self): self.assertRedirects(response, reverse('home')) self.assertEqual( - list(get_messages(response.wsgi_request))[-1].message, - 'User not authorized for requested action.', + list(get_messages(response.wsgi_request))[-1].message, MSG_NO_AUTH ) request.refresh_from_db() self.assertEqual(request.status, IRODS_REQUEST_STATUS_ACTIVE) From 814f96012472f427c47e228924737b9788f0a58a Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Mon, 19 Feb 2024 15:46:07 +0100 Subject: [PATCH 22/33] fix uncaught SetAccessTask exceptions (#1906) --- CHANGELOG.rst | 1 + taskflowbackend/tasks/irods_tasks.py | 23 ++++++++++++----------- 2 files changed, 13 insertions(+), 11 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 5055022b..2c92553f 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -61,6 +61,7 @@ Fixed - Sheet template creation failure with slash characters in title/ID fields (#1896) - **Taskflowbackend** - Hardcoded iRODS path length in ``landing_zone_move`` (#1888) + - Uncaught exceptions in ``SetAccessTask`` (#1906) Removed ------- diff --git a/taskflowbackend/tasks/irods_tasks.py b/taskflowbackend/tasks/irods_tasks.py index 5f60bc20..e8108d4f 100644 --- a/taskflowbackend/tasks/irods_tasks.py +++ b/taskflowbackend/tasks/irods_tasks.py @@ -292,17 +292,21 @@ def execute( *args, **kwargs ): - if obj_target: - target = self.irods.data_objects.get(path) - recursive = False - else: - target = self.irods.collections.get(path) - recursive = recursive + try: + if obj_target: + target = self.irods.data_objects.get(path) + recursive = False + else: + target = self.irods.collections.get(path) + recursive = recursive + target_access = self.irods.permissions.get(target=target) + except Exception as ex: + self._raise_irods_exception(ex, path) - target_access = self.irods.permissions.get(target=target) user_access = next( (x for x in target_access if x.user_name == user_name), None ) + 
modifying_data = False if ( user_access and user_access.access_name != access_lookup[access_name] @@ -314,8 +318,6 @@ def execute( elif not user_access: self.execute_data['access_name'] = 'null' modifying_data = True - else: - modifying_data = False if modifying_data: acl = iRODSAccess( @@ -327,8 +329,7 @@ def execute( try: self.irods.permissions.set(acl, recursive=recursive) except Exception as ex: - ex_info = user_name - self._raise_irods_exception(ex, ex_info) + self._raise_irods_exception(ex, user_name) self.data_modified = True super().execute(*args, **kwargs) From 60e27d3b71a15786450da738adff8d11aa171940 Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Tue, 20 Feb 2024 16:35:44 +0100 Subject: [PATCH 23/33] fix landing_zone_create crash with large projects (#1905) --- CHANGELOG.rst | 3 + docs_manual/source/sodar_release_notes.rst | 1 + taskflowbackend/flows/landing_zone_create.py | 24 +- taskflowbackend/tasks/irods_tasks.py | 203 +++++++++++++---- taskflowbackend/tests/test_tasks.py | 217 ++++++++++++++++++- 5 files changed, 388 insertions(+), 60 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 2c92553f..5ebbe96c 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -19,6 +19,8 @@ Added - UI tests for project details card (#1902) - **Taskflowbackend** - Django settings in siteinfo (#1901) + - ``BatchSetAccessTask`` in iRODS tasks (#1905) + - ``IrodsAccessMixin`` task helper mixin (#1905) Changed ------- @@ -62,6 +64,7 @@ Fixed - **Taskflowbackend** - Hardcoded iRODS path length in ``landing_zone_move`` (#1888) - Uncaught exceptions in ``SetAccessTask`` (#1906) + - Crash in ``landing_zone_create`` with large amount of collections (#1905) Removed ------- diff --git a/docs_manual/source/sodar_release_notes.rst b/docs_manual/source/sodar_release_notes.rst index 349c0e97..538b1648 100644 --- a/docs_manual/source/sodar_release_notes.rst +++ b/docs_manual/source/sodar_release_notes.rst @@ -20,6 +20,7 @@ Release for minor updates, maintenance and bug fixes. 
- Fix LDAP TLS settings - Fix iRODS stats badge stuck in "updating" - Fix landing zone status updating not working in project details card +- Fix landing zone creation crash with large amount of created collections - Minor updates and bug fixes - Upgrade to SODAR Core v0.13.4 diff --git a/taskflowbackend/flows/landing_zone_create.py b/taskflowbackend/flows/landing_zone_create.py index 94550cc3..b9a87948 100644 --- a/taskflowbackend/flows/landing_zone_create.py +++ b/taskflowbackend/flows/landing_zone_create.py @@ -154,26 +154,30 @@ def build(self, force_fail=False): ) ) # Create collections - for d in self.flow_data['colls']: - coll_path = os.path.join(zone_path, d) + if self.flow_data['colls']: + colls_full_path = [ + os.path.join(zone_path, c) for c in self.flow_data['colls'] + ] + coll_count = len(self.flow_data['colls']) self.add_task( - irods_tasks.CreateCollectionTask( - name='Create collection {}'.format(coll_path), + irods_tasks.BatchCreateCollectionsTask( + name='Batch create {} collection{}'.format( + coll_count, 's' if coll_count != 1 else '' + ), irods=self.irods, - inject={'path': coll_path}, + inject={'paths': colls_full_path}, ) ) # Enforce collection access if set if self.flow_data['restrict_colls']: self.add_task( - irods_tasks.SetAccessTask( - name='Set user owner access to collection {}'.format( - coll_path - ), + irods_tasks.BatchSetAccessTask( + name='Batch set user owner access to created ' + 'collections', irods=self.irods, inject={ 'access_name': 'own', - 'path': coll_path, + 'paths': colls_full_path, 'user_name': zone.user.username, 'access_lookup': access_lookup, 'irods_backend': self.irods_backend, diff --git a/taskflowbackend/tasks/irods_tasks.py b/taskflowbackend/tasks/irods_tasks.py index e8108d4f..ee931927 100644 --- a/taskflowbackend/tasks/irods_tasks.py +++ b/taskflowbackend/tasks/irods_tasks.py @@ -26,6 +26,96 @@ MD5_RE = re.compile(r'([^\w.])') +# Mixins ----------------------------------------------------------------------- + + +class IrodsAccessMixin: + """Mixin for iRODS access helpers""" + + def execute_set_access( + self, + access_name, + path, + user_name, + access_lookup, + obj_target, + recursive, + ): + """ + Set access for user in a single data object or collection. 
+ + :param access_name: Access level to set (string) + :param path: Full iRODS path to collection or data object (string) + :param user_name: Name of user or group (string) + :param access_lookup: Access level lookup for iRODS compatibility (dict) + :param obj_target: Whether target is a data object (boolean) + :param recursive: Set collection access recursively if True (boolean) + """ + if not self.execute_data.get('access_names'): + self.execute_data['access_names'] = {} + if obj_target: + target = self.irods.data_objects.get(path) + recursive = False + else: + target = self.irods.collections.get(path) + recursive = recursive + target_access = self.irods.permissions.get(target=target) + + user_access = next( + (x for x in target_access if x.user_name == user_name), None + ) + modifying_data = False + if ( + user_access + and user_access.access_name != access_lookup[access_name] + ): + self.execute_data['access_names'][path] = access_lookup[ + user_access.access_name + ] + modifying_data = True + elif not user_access: + self.execute_data['access_names'][path] = 'null' + modifying_data = True + + if modifying_data: + acl = iRODSAccess( + access_name=access_name, + path=path, + user_name=user_name, + user_zone=self.irods.zone, + ) + self.irods.permissions.set(acl, recursive=recursive) + self.data_modified = True # Access was modified + + def revert_set_access( + self, + path, + user_name, + obj_target, + recursive, + ): + """ + Revert setting access for user in a single collection or data object. + + :param path: Full iRODS path to collection or data object (string) + :param user_name: Name of user or group (string) + :param obj_target: Whether target is a data object (boolean) + :param recursive: Set collection access recursively if True (boolean) + """ + if self.data_modified: + acl = iRODSAccess( + access_name=self.execute_data['access_names'][path], + path=path, + user_name=user_name, + user_zone=self.irods.zone, + ) + recursive = False if obj_target else recursive + self.irods.permissions.set(acl, recursive=recursive) + + +# Base Task -------------------------------------------------------------------- + + class IrodsBaseTask(BaseTask): """Base iRODS task""" @@ -51,6 +141,9 @@ def _raise_irods_exception(self, ex, info=None): raise Exception(desc) +# Tasks ------------------------------------------------------------------------ + + class CreateCollectionTask(IrodsBaseTask): """ Create collection and its parent collections if they doesn't exist (imkdir) @@ -274,7 +367,7 @@ def revert(self, path, inherit=True, *args, **kwargs): ''' -class SetAccessTask(IrodsBaseTask): +class SetAccessTask(IrodsAccessMixin, IrodsBaseTask): """ Set user/group access to target (ichmod). If the target is a data object (obj_target=True), the recursive argument will be ignored. 
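For reference, the access handling that ``IrodsAccessMixin`` wraps reduces to a few python-irodsclient calls. A minimal sketch, assuming an open ``iRODSSession`` named ``irods``; the collection path and user name are hypothetical:

    from irods.access import iRODSAccess

    # Inspect the current ACL of a collection (used above to decide whether
    # the requested access level is already in place)
    coll = irods.collections.get('/sodarZone/projects/aa/bb-cc')  # hypothetical path
    current_acl = irods.permissions.get(target=coll)

    # Grant an access level to a user or group, recursively for collections
    acl = iRODSAccess(
        access_name='own',      # e.g. a value mapped through access_lookup
        path=coll.path,
        user_name='user0',      # hypothetical user or group name
        user_zone=irods.zone,
    )
    irods.permissions.set(acl, recursive=True)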
@@ -293,44 +386,16 @@ def execute( **kwargs ): try: - if obj_target: - target = self.irods.data_objects.get(path) - recursive = False - else: - target = self.irods.collections.get(path) - recursive = recursive - target_access = self.irods.permissions.get(target=target) - except Exception as ex: - self._raise_irods_exception(ex, path) - - user_access = next( - (x for x in target_access if x.user_name == user_name), None - ) - modifying_data = False - if ( - user_access - and user_access.access_name != access_lookup[access_name] - ): - self.execute_data['access_name'] = access_lookup[ - user_access.access_name - ] - modifying_data = True - elif not user_access: - self.execute_data['access_name'] = 'null' - modifying_data = True - - if modifying_data: - acl = iRODSAccess( - access_name=access_name, - path=path, - user_name=user_name, - user_zone=self.irods.zone, + self.execute_set_access( + access_name, + path, + user_name, + access_lookup, + obj_target, + recursive, ) - try: - self.irods.permissions.set(acl, recursive=recursive) - except Exception as ex: - self._raise_irods_exception(ex, user_name) - self.data_modified = True + except Exception as ex: + self._raise_irods_exception(ex, user_name) super().execute(*args, **kwargs) def revert( @@ -345,15 +410,10 @@ def revert( *args, **kwargs ): - if self.data_modified: - acl = iRODSAccess( - access_name=self.execute_data['access_name'], - path=path, - user_name=user_name, - user_zone=self.irods.zone, - ) - recursive = False if obj_target else recursive - self.irods.permissions.set(acl, recursive=recursive) + try: + self.revert_set_access(path, user_name, obj_target, recursive) + except Exception: + pass # TODO: Log revert() exceptions? class IssueTicketTask(IrodsBaseTask): @@ -496,7 +556,53 @@ def revert(self, src_path, dest_path, *args, **kwargs): self.irods.data_objects.move(src_path=new_src, dest_path=new_dest) -# Batch file tasks ------------------------------------------------------------- +# Batch Tasks ------------------------------------------------------------------ + + +class BatchSetAccessTask(IrodsAccessMixin, IrodsBaseTask): + """ + Set user/group access to multiple targets (ichmod). If a target is a data + object (obj_target=True), the recursive argument will be ignored. 
+ """ + + def execute( + self, + access_name, + paths, + user_name, + access_lookup, + irods_backend, + obj_target=False, + recursive=True, + *args, + **kwargs + ): + # NOTE: Exception handling is done within execute_set_access() + for path in paths: + self.execute_set_access( + access_name, + path, + user_name, + access_lookup, + obj_target, + recursive, + ) + super().execute(*args, **kwargs) + + def revert( + self, + access_name, + paths, + user_name, + access_lookup, + irods_backend, + obj_target=False, + recursive=True, + *args, + **kwargs + ): + for path in paths: + self.revert_set_access(path, user_name, obj_target, recursive) class BatchCheckFilesTask(IrodsBaseTask): @@ -666,6 +772,7 @@ def execute( 'Error getting permissions of target "{}"'.format(target), ) + # TODO: Remove repetition, use IrodsAccessMixin user_access = next( (x for x in target_access if x.user_name == user_name), None ) diff --git a/taskflowbackend/tests/test_tasks.py b/taskflowbackend/tests/test_tasks.py index 3f5a353a..62f455ef 100644 --- a/taskflowbackend/tests/test_tasks.py +++ b/taskflowbackend/tests/test_tasks.py @@ -34,6 +34,7 @@ NEW_COLL2_NAME = 'test_new2' TEST_OBJ_NAME = 'move_obj' SUB_COLL_NAME = 'sub' +SUB_COLL_NAME2 = 'sub2' MOVE_COLL_NAME = 'move_coll' TEST_USER = USER_PREFIX + 'user3' @@ -860,7 +861,7 @@ def test_execute_twice(self): self.assertEqual(user_access.access_name, self.irods_access_read) def test_revert_created(self): - """Test access setting""" + """Test reverting created access""" self._add_task( cls=SetAccessTask, name='Set access', @@ -883,7 +884,7 @@ def test_revert_created(self): # self.assertEqual(user_access.access_name, TEST_ACCESS_NULL) def test_revert_modified(self): - """Test access setting reverting after modification""" + """Test reverting modified access""" self._add_task( cls=SetAccessTask, name='Set access', @@ -1607,6 +1608,218 @@ def test_overwrite_failure(self): # TODO: Test Checksum verifying +class TestBatchSetAccessTask(IRODSTaskTestBase): + """Tests for BatchSetAccessTask""" + + def setUp(self): + super().setUp() + self.sub_coll_path = os.path.join(self.test_coll_path, SUB_COLL_NAME) + self.sub_coll_path2 = os.path.join(self.test_coll_path, SUB_COLL_NAME2) + self.irods.collections.create(self.sub_coll_path) + self.irods.collections.create(self.sub_coll_path2) + self.paths = [self.sub_coll_path, self.sub_coll_path2] + # Init default user group + self.irods.user_groups.create(DEFAULT_USER_GROUP) + self.access_lookup = self.irods_backend.get_access_lookup(self.irods) + + def test_execute_read(self): + """Test access setting for read""" + self._add_task( + cls=BatchSetAccessTask, + name='Set access', + inject={ + 'access_name': IRODS_ACCESS_READ_IN, + 'paths': self.paths, + 'user_name': DEFAULT_USER_GROUP, + 'access_lookup': self.access_lookup, + 'irods_backend': self.irods_backend, + }, + ) + self.assert_irods_access(DEFAULT_USER_GROUP, self.sub_coll_path, None) + self.assert_irods_access(DEFAULT_USER_GROUP, self.sub_coll_path2, None) + result = self._run_flow() + + self.assertEqual(result, True) + self.assert_irods_access( + DEFAULT_USER_GROUP, self.sub_coll_path, self.irods_access_read + ) + self.assert_irods_access( + DEFAULT_USER_GROUP, self.sub_coll_path2, self.irods_access_read + ) + + def test_execute_write(self): + """Test access setting for write/modify""" + self._add_task( + cls=BatchSetAccessTask, + name='Set access', + inject={ + 'access_name': IRODS_ACCESS_WRITE_IN, + 'paths': self.paths, + 'user_name': DEFAULT_USER_GROUP, + 'access_lookup': 
self.access_lookup, + 'irods_backend': self.irods_backend, + }, + ) + self.assert_irods_access(DEFAULT_USER_GROUP, self.sub_coll_path, None) + self.assert_irods_access(DEFAULT_USER_GROUP, self.sub_coll_path2, None) + result = self._run_flow() + + self.assertEqual(result, True) + self.assert_irods_access( + DEFAULT_USER_GROUP, self.sub_coll_path, self.irods_access_write + ) + self.assert_irods_access( + DEFAULT_USER_GROUP, self.sub_coll_path2, self.irods_access_write + ) + + def test_execute_mixed(self): + """Test access setting for both new and previously set access levels""" + self._add_task( + cls=SetAccessTask, + name='Set access', + inject={ + 'access_name': IRODS_ACCESS_READ_IN, + 'path': self.sub_coll_path, + 'user_name': DEFAULT_USER_GROUP, + 'access_lookup': self.access_lookup, + 'irods_backend': self.irods_backend, + }, + ) + self._run_flow() + self.assert_irods_access( + DEFAULT_USER_GROUP, self.sub_coll_path, self.irods_access_read + ) + self.assert_irods_access(DEFAULT_USER_GROUP, self.sub_coll_path2, None) + + self.flow = self._init_flow() + self._add_task( + cls=BatchSetAccessTask, + name='Set access', + inject={ + 'access_name': IRODS_ACCESS_WRITE_IN, + 'paths': self.paths, + 'user_name': DEFAULT_USER_GROUP, + 'access_lookup': self.access_lookup, + 'irods_backend': self.irods_backend, + }, + ) + result = self._run_flow() + + self.assertEqual(result, True) + self.assert_irods_access( + DEFAULT_USER_GROUP, self.sub_coll_path, self.irods_access_write + ) + self.assert_irods_access( + DEFAULT_USER_GROUP, self.sub_coll_path2, self.irods_access_write + ) + + def test_revert_created(self): + """Test reverting created access""" + self._add_task( + cls=BatchSetAccessTask, + name='Set access', + inject={ + 'access_name': IRODS_ACCESS_READ_IN, + 'paths': self.paths, + 'user_name': DEFAULT_USER_GROUP, + 'access_lookup': self.access_lookup, + 'irods_backend': self.irods_backend, + }, + force_fail=True, + ) # FAIL + self.assert_irods_access(DEFAULT_USER_GROUP, self.sub_coll_path, None) + self.assert_irods_access(DEFAULT_USER_GROUP, self.sub_coll_path2, None) + result = self._run_flow() + + self.assertNotEqual(result, True) + self.assert_irods_access(DEFAULT_USER_GROUP, self.sub_coll_path, None) + self.assert_irods_access(DEFAULT_USER_GROUP, self.sub_coll_path2, None) + + def test_revert_modified(self): + """Test reverting modified access""" + self._add_task( + cls=BatchSetAccessTask, + name='Set access', + inject={ + 'access_name': IRODS_ACCESS_READ_IN, + 'paths': self.paths, + 'user_name': DEFAULT_USER_GROUP, + 'access_lookup': self.access_lookup, + 'irods_backend': self.irods_backend, + }, + ) + self._run_flow() + self.assert_irods_access( + DEFAULT_USER_GROUP, self.sub_coll_path, self.irods_access_read + ) + self.assert_irods_access( + DEFAULT_USER_GROUP, self.sub_coll_path2, self.irods_access_read + ) + + self.flow = self._init_flow() + self._add_task( + cls=BatchSetAccessTask, + name='Set access', + inject={ + 'access_name': IRODS_ACCESS_WRITE_IN, + 'paths': self.paths, + 'user_name': DEFAULT_USER_GROUP, + 'access_lookup': self.access_lookup, + 'irods_backend': self.irods_backend, + }, + force_fail=True, + ) # FAIL + result = self._run_flow() + + self.assertNotEqual(result, True) + self.assert_irods_access( + DEFAULT_USER_GROUP, self.sub_coll_path, self.irods_access_read + ) + self.assert_irods_access( + DEFAULT_USER_GROUP, self.sub_coll_path2, self.irods_access_read + ) + + def test_revert_mixed(self): + """Test reverting access for both new and existing access levels""" + 
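+        # Grant read on the first path only, then fail a batch write update:
+        # revert should restore read there and leave the second path without access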
self._add_task( + cls=SetAccessTask, + name='Set access', + inject={ + 'access_name': IRODS_ACCESS_READ_IN, + 'path': self.sub_coll_path, + 'user_name': DEFAULT_USER_GROUP, + 'access_lookup': self.access_lookup, + 'irods_backend': self.irods_backend, + }, + ) + self._run_flow() + self.assert_irods_access( + DEFAULT_USER_GROUP, self.sub_coll_path, self.irods_access_read + ) + self.assert_irods_access(DEFAULT_USER_GROUP, self.sub_coll_path2, None) + + self.flow = self._init_flow() + self._add_task( + cls=BatchSetAccessTask, + name='Set access', + inject={ + 'access_name': IRODS_ACCESS_WRITE_IN, + 'paths': self.paths, + 'user_name': DEFAULT_USER_GROUP, + 'access_lookup': self.access_lookup, + 'irods_backend': self.irods_backend, + }, + force_fail=True, + ) # FAIL + result = self._run_flow() + + self.assertNotEqual(result, True) + self.assert_irods_access( + DEFAULT_USER_GROUP, self.sub_coll_path, self.irods_access_read + ) + self.assert_irods_access(DEFAULT_USER_GROUP, self.sub_coll_path2, None) + + class TestBatchCreateCollectionsTask(IRODSTaskTestBase): """Tests for BatchCreateCollectionsTask""" From 33534cbd29e98e702b10b674e151bab24af394fb Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Fri, 23 Feb 2024 15:52:28 +0100 Subject: [PATCH 24/33] add IrodsDataRequest timeline event extra data (#1912, #1913) --- CHANGELOG.rst | 3 + docs_manual/source/sodar_release_notes.rst | 1 + samplesheets/tests/test_views_taskflow.py | 151 ++++++++++++--------- samplesheets/views.py | 67 +++------ samplesheets/views_ajax.py | 10 +- samplesheets/views_api.py | 6 +- 6 files changed, 111 insertions(+), 127 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 5ebbe96c..f0fc32b4 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -17,6 +17,8 @@ Added - Create assay plugin shortcut collections for zones (#1869) - Zone statistics for siteinfo (#1898) - UI tests for project details card (#1902) +- **Samplesheets** + - ``IrodsDataRequest`` timeline event extra data (#1912) - **Taskflowbackend** - Django settings in siteinfo (#1901) - ``BatchSetAccessTask`` in iRODS tasks (#1905) @@ -38,6 +40,7 @@ Changed - **Samplesheets** - Improve Django messages for ``IrodsDataRequest`` exceptions (#1858) - Change ``IrodsDataRequest`` description if created in Ajax view (#1862) + - Refactor ``IrodsDataRequestModifyMixin`` timeline helpers (#1913) - **Taskflowbackend** - Optimize ``landing_zone_move`` iRODS path retrieval (#1882) - Set zone status on uncaught errors in ``run_flow()`` (#1458) diff --git a/docs_manual/source/sodar_release_notes.rst b/docs_manual/source/sodar_release_notes.rst index 538b1648..4af59d98 100644 --- a/docs_manual/source/sodar_release_notes.rst +++ b/docs_manual/source/sodar_release_notes.rst @@ -14,6 +14,7 @@ v0.14.2 (WIP) Release for minor updates, maintenance and bug fixes. 
- Add assay plugin shortcut collection creation in landing zones +- Add iRODS data request details in timeline events - Add landing zone statistics in siteinfo - Fix iRODS data request issues - Fix IRODS_ROOT_PATH issues diff --git a/samplesheets/tests/test_views_taskflow.py b/samplesheets/tests/test_views_taskflow.py index 70957895..3671a7f3 100644 --- a/samplesheets/tests/test_views_taskflow.py +++ b/samplesheets/tests/test_views_taskflow.py @@ -1161,10 +1161,10 @@ def setUp(self): class TestIrodsDataRequestCreateView(IrodsDataRequestViewTestBase): - """Test IrodsDataRequestCreateView""" + """Tests for IrodsDataRequestCreateView""" - def test_create(self): - """Test creating a delete request""" + def test_post(self): + """Test IrodsDataRequestCreateView POST""" self.assertEqual(IrodsDataRequest.objects.count(), 0) self._assert_tl_count(EVENT_CREATE, 0) self._assert_alert_count(EVENT_CREATE, self.user, 0) @@ -1195,11 +1195,19 @@ def test_create(self): self.assertEqual(obj.path, self.obj_path) self.assertEqual(obj.description, IRODS_REQUEST_DESC) self._assert_tl_count(EVENT_CREATE, 1) + self.assertEqual( + ProjectEvent.objects.get(event_name=EVENT_CREATE).extra_data, + { + 'action': IRODS_REQUEST_ACTION_DELETE, + 'path': obj.path, + 'description': obj.description, + }, + ) self._assert_alert_count(EVENT_CREATE, self.user, 1) self._assert_alert_count(EVENT_CREATE, self.user_delegate, 1) - def test_create_trailing_slash(self): - """Test creating a delete request with trailing slash in path""" + def test_post_trailing_slash(self): + """Test POST with trailing slash in path""" self.assertEqual(IrodsDataRequest.objects.count(), 0) post_data = { 'path': self.obj_path + '/', @@ -1231,8 +1239,8 @@ def test_create_trailing_slash(self): self.assertEqual(obj.path, self.obj_path) self.assertEqual(obj.description, IRODS_REQUEST_DESC) - def test_create_invalid_form_data(self): - """Test creating a delete request with invalid form data""" + def test_post_invalid_data(self): + """Test POST with invalid form data""" self.assertEqual(IrodsDataRequest.objects.count(), 0) self._assert_tl_count(EVENT_CREATE, 0) self._assert_alert_count(EVENT_CREATE, self.user, 0) @@ -1257,8 +1265,8 @@ def test_create_invalid_form_data(self): self._assert_alert_count(EVENT_CREATE, self.user, 0) self._assert_alert_count(EVENT_CREATE, self.user_delegate, 0) - def test_create_invalid_path_assay_collection(self): - """Test creating a delete request with assay path (should fail)""" + def test_post_assay_path(self): + """Test POST with assay path (should fail)""" self.assertEqual(IrodsDataRequest.objects.count(), 0) post_data = {'path': self.assay_path, 'description': IRODS_REQUEST_DESC} @@ -1278,7 +1286,7 @@ def test_create_invalid_path_assay_collection(self): self.assertEqual(IrodsDataRequest.objects.count(), 0) def test_create_multiple(self): - """Test creating multiple_requests""" + """Test POST with multiple requests for same path""" path2 = os.path.join(self.assay_path, IRODS_FILE_NAME2) self.irods.data_objects.create(path2) self.assertEqual(IrodsDataRequest.objects.count(), 0) @@ -1359,6 +1367,14 @@ def test_post(self): self.assertEqual(self.request.path, self.obj_path) self.assertEqual(self.request.description, IRODS_REQUEST_DESC_UPDATE) self._assert_tl_count(EVENT_UPDATE, 1) + self.assertEqual( + ProjectEvent.objects.get(event_name=EVENT_UPDATE).extra_data, + { + 'action': IRODS_REQUEST_ACTION_DELETE, + 'path': self.request.path, + 'description': self.request.description, + }, + ) def test_post_superuser(self): """Test POST 
as superuser""" @@ -1406,7 +1422,7 @@ def test_post_invalid_form_data(self): class TestIrodsDataRequestDeleteView( IrodsDataRequestMixin, IrodsDataRequestViewTestBase ): - """Tests for IrodsDataRequestUpdateView""" + """Tests for IrodsDataRequestDeleteView""" def setUp(self): super().setUp() @@ -1415,8 +1431,8 @@ def setUp(self): kwargs={'project': self.project.sodar_uuid}, ) - def test_delete_contributor(self): - """Test POST request for deleting a request""" + def test_post(self): + """Test IrodsDataRequestDeleteView POST""" # NOTE: We use post() to ensure alerts are created with self.login(self.user_contributor): self.client.post(self.create_url, self.post_data) @@ -1448,13 +1464,16 @@ def test_delete_contributor(self): ) self.assertEqual(IrodsDataRequest.objects.count(), 0) self.assert_irods_obj(self.obj_path) - # Create requests should be deleted self._assert_tl_count(EVENT_DELETE, 1) + self.assertEqual( + ProjectEvent.objects.get(event_name=EVENT_DELETE).extra_data, {} + ) + # Create alerts should be deleted self._assert_alert_count(EVENT_CREATE, self.user, 0) self._assert_alert_count(EVENT_CREATE, self.user_delegate, 0) - def test_delete_one_of_multiple(self): - """Test deleting one of multiple requests""" + def test_post_one_of_multiple(self): + """Test POST for one of multiple requests""" self._assert_tl_count(EVENT_DELETE, 0) obj_path2 = os.path.join(self.assay_path, IRODS_FILE_NAME2) self.irods.data_objects.create(obj_path2) @@ -1500,8 +1519,8 @@ def setUp(self): kwargs={'project': self.project.sodar_uuid}, ) - def test_render(self): - """Test rendering IrodsDataRequestAcceptView""" + def test_get(self): + """Test IrodsDataRequestAcceptView GET""" self.assert_irods_obj(self.obj_path) self.make_irods_request( project=self.project, @@ -1523,8 +1542,8 @@ def test_render(self): ) self.assertEqual(response.context['request_objects'][0], obj) - def test_render_coll(self): - """Test rendering IrodsDataRequestAcceptView with collection request""" + def test_get_coll(self): + """Test GET with collection request""" coll_path = os.path.join(self.assay_path, 'request_coll') self.irods.collections.create(coll_path) self.assertEqual(self.irods.collections.exists(coll_path), True) @@ -1547,8 +1566,8 @@ def test_render_coll(self): ) self.assertEqual(response.context['request_objects'][0], request) - def test_accept(self): - """Test accepting a delete request""" + def test_post(self): + """Test POST to accept delete request""" self.assert_irods_obj(self.obj_path) with self.login(self.user_contributor): self.client.post(self.create_url, self.post_data) @@ -1589,8 +1608,8 @@ def test_accept(self): self._assert_alert_count(EVENT_ACCEPT, self.user_delegate, 0) self.assert_irods_obj(self.obj_path, False) - def test_accept_no_request(self): - """Test accepting non-existing delete request""" + def test_post_no_request(self): + """Test POST with non-existing delete request""" self.assertEqual(IrodsDataRequest.objects.count(), 0) with self.login(self.user_owner_cat): response = self.client.post( @@ -1602,8 +1621,8 @@ def test_accept_no_request(self): ) self.assertEqual(response.status_code, 404) - def test_accept_invalid_form_data(self): - """Test accepting delete request with invalid form data""" + def test_post_invalid_data(self): + """Test POST with invalid form data""" self.assert_irods_obj(self.obj_path) with self.login(self.user_contributor): self.client.post(self.create_url, self.post_data) @@ -1636,8 +1655,8 @@ def test_accept_invalid_form_data(self): self._assert_alert_count(EVENT_ACCEPT, 
self.user_delegate, 0) self.assert_irods_obj(self.obj_path) - def test_accept_owner(self): - """Test accepting delete request as owner""" + def test_post_as_owner(self): + """Test POST as owner""" self.assert_irods_obj(self.obj_path) request = self.make_irods_request( project=self.project, @@ -1666,8 +1685,8 @@ def test_accept_owner(self): self._assert_alert_count(EVENT_ACCEPT, self.user_delegate, 0) self._assert_alert_count(EVENT_ACCEPT, self.user_contributor, 1) - def test_accept_delegate(self): - """Test accepting delete request as delegate""" + def test_post_delegate(self): + """Test POST as delegate""" self.assert_irods_obj(self.obj_path) request = self.make_irods_request( project=self.project, @@ -1696,8 +1715,8 @@ def test_accept_delegate(self): self._assert_alert_count(EVENT_ACCEPT, self.user_delegate, 0) self._assert_alert_count(EVENT_ACCEPT, self.user_contributor, 1) - def test_accept_contributor(self): - """Test accepting delete request as contributor""" + def test_post_contributor(self): + """Test POST as contributor""" self.assert_irods_obj(self.obj_path) request = self.make_irods_request( project=self.project, @@ -1727,8 +1746,8 @@ def test_accept_contributor(self): self._assert_alert_count(EVENT_ACCEPT, self.user_delegate, 0) self._assert_alert_count(EVENT_ACCEPT, self.user_contributor, 0) - def test_accept_one_of_multiple(self): - """Test accepting one of multiple requests""" + def test_post_one_of_multiple(self): + """Test POST for one of multiple requests""" obj_path2 = os.path.join(self.assay_path, IRODS_FILE_NAME2) self.irods.data_objects.create(obj_path2) self.assert_irods_obj(self.obj_path) @@ -1772,8 +1791,8 @@ def test_accept_one_of_multiple(self): self.assert_irods_obj(obj_path2, False) @override_settings(REDIS_URL=INVALID_REDIS_URL) - def test_accept_lock_failure(self): - """Test accepting a delete request with project lock failure""" + def test_post_lock_failure(self): + """Test POST with project lock failure""" self.assert_irods_obj(self.obj_path) with self.login(self.user_contributor): self.client.post(self.create_url, self.post_data) @@ -1804,8 +1823,8 @@ def test_accept_lock_failure(self): self._assert_alert_count(EVENT_CREATE, self.user_delegate, 1) self.assert_irods_obj(self.obj_path, True) - def test_accept_collection(self): - """Test accepting a collection request with multiple objects inside""" + def test_post_collection(self): + """Test POST with multiple objects in collection""" coll_path = os.path.join(self.assay_path, 'request_coll') obj_path2 = os.path.join(coll_path, IRODS_FILE_NAME) self.irods.collections.create(coll_path) @@ -1858,8 +1877,8 @@ def setUp(self): kwargs={'project': self.project.sodar_uuid}, ) - def test_render(self): - """Test rendering IrodsDataRequestAcceptBatchView""" + def test_get(self): + """Test IrodsDataRequestAcceptBatchView GET""" self.assert_irods_obj(self.obj_path) self.make_irods_request( project=self.project, @@ -1911,8 +1930,8 @@ def test_render(self): IrodsDataRequest.objects.last(), ) - def test_render_coll(self): - """Test rendering IrodsDataRequestAcceptBatchView for a collection""" + def test_get_coll(self): + """Test GET with collection""" coll_path = os.path.join(self.assay_path, 'request_coll') self.irods.collections.create(coll_path) self.assertEqual(self.irods.collections.exists(coll_path), True) @@ -1946,8 +1965,8 @@ def test_render_coll(self): response.context['affected_object_paths'][0], coll_path ) - def test_accept(self): - """Test accepting a delete request""" + def test_post(self): + """Test POST to 
accept delete requests""" self.assert_irods_obj(self.obj_path) self.assert_irods_obj(self.obj_path2) @@ -2014,8 +2033,8 @@ def test_accept(self): self.assert_irods_obj(self.obj_path, False) self.assert_irods_obj(self.obj_path2, False) - def test_accept_no_request(self): - """Test accepting a delete request that doesn't exist""" + def test_post_no_request(self): + """Test POST with non-existing delete request""" self.assertEqual(IrodsDataRequest.objects.count(), 0) with self.login(self.user_owner_cat): response = self.client.post( @@ -2031,8 +2050,8 @@ def test_accept_no_request(self): NO_REQUEST_MSG, ) - def test_accept_invalid_form_data(self): - """Test accepting a delete request with invalid form data""" + def test_post_invalid_data(self): + """Test POST with invalid form data""" self.assert_irods_obj(self.obj_path) self.assert_irods_obj(self.obj_path2) @@ -2074,8 +2093,8 @@ def test_accept_invalid_form_data(self): self.assert_irods_obj(self.obj_path2) @override_settings(REDIS_URL=INVALID_REDIS_URL) - def test_accept_lock_failure(self): - """Test accepting a delete request with redis lock failure""" + def test_post_lock_failure(self): + """Test POST with project lock failure""" self.assert_irods_obj(self.obj_path) self.assert_irods_obj(self.obj_path2) @@ -2135,8 +2154,8 @@ def setUp(self): kwargs={'project': self.project.sodar_uuid}, ) - def test_reject_admin(self): - """Test rejecting delete request as admin""" + def test_get_admin(self): + """Test IrodsDataRequestRejectView GET as admin""" self.assertEqual(IrodsDataRequest.objects.count(), 0) self.assert_irods_obj(self.obj_path) self._assert_alert_count(EVENT_REJECT, self.user, 0) @@ -2178,8 +2197,8 @@ def test_reject_admin(self): self._assert_alert_count(EVENT_REJECT, self.user_delegate, 0) self._assert_alert_count(EVENT_REJECT, self.user_contributor, 1) - def test_reject_owner(self): - """Test rejecting delete request as owner""" + def test_get_owner(self): + """Test GET as owner""" self.assertEqual(IrodsDataRequest.objects.count(), 0) request = self.make_irods_request( project=self.project, @@ -2203,8 +2222,8 @@ def test_reject_owner(self): self._assert_alert_count(EVENT_REJECT, self.user_delegate, 0) self._assert_alert_count(EVENT_REJECT, self.user_contributor, 1) - def test_reject_delegate(self): - """Test rejecting delete request as delegate""" + def test_get_delegate(self): + """Test GET as delegate""" self.assertEqual(IrodsDataRequest.objects.count(), 0) request = self.make_irods_request( project=self.project, @@ -2228,8 +2247,8 @@ def test_reject_delegate(self): self._assert_alert_count(EVENT_REJECT, self.user_delegate, 0) self._assert_alert_count(EVENT_REJECT, self.user_contributor, 1) - def test_reject_contributor(self): - """Test rejecting delete request as contributor""" + def test_get_contributor(self): + """Test GET as contributor""" self.assertEqual(IrodsDataRequest.objects.count(), 0) request = self.make_irods_request( project=self.project, @@ -2257,8 +2276,8 @@ def test_reject_contributor(self): self._assert_alert_count(EVENT_REJECT, self.user_delegate, 0) self._assert_alert_count(EVENT_REJECT, self.user_contributor, 0) - def test_reject_one_of_multiple(self): - """Test rejecting one of multiple requests""" + def test_get_one_of_multiple(self): + """Test GET with one of multiple requests""" obj_path2 = os.path.join(self.assay_path, IRODS_FILE_NAME2) self.irods.data_objects.create(obj_path2) self.assert_irods_obj(self.obj_path) @@ -2299,8 +2318,8 @@ def test_reject_one_of_multiple(self): 
self._assert_alert_count(EVENT_REJECT, self.user_delegate, 0) self._assert_alert_count(EVENT_REJECT, self.user_contributor, 1) - def test_reject_no_request(self): - """Test rejecting delete request that doesn't exist""" + def test_get_no_request(self): + """Test GET with non-existing delete request""" with self.login(self.user_owner_cat): response = self.client.get( reverse( @@ -2327,8 +2346,8 @@ def setUp(self): kwargs={'project': self.project.sodar_uuid}, ) - def test_reject(self): - """Test rejecting delete request""" + def test_post(self): + """Test IrodsDataRequestRejectBatchView POST""" self.assertEqual(IrodsDataRequest.objects.count(), 0) self._assert_alert_count(EVENT_REJECT, self.user, 0) self._assert_alert_count(EVENT_REJECT, self.user_delegate, 0) @@ -2378,8 +2397,8 @@ def test_reject(self): self._assert_alert_count(EVENT_REJECT, self.user_delegate, 0) self._assert_alert_count(EVENT_REJECT, self.user_contributor, 0) - def test_reject_no_request(self): - """Test rejecting delete request that doesn't exist""" + def test_post_no_request(self): + """Test POST with non-existing request""" with self.login(self.user_owner_cat): response = self.client.post( self.reject_url, diff --git a/samplesheets/views.py b/samplesheets/views.py index b3550dbf..d2ae8daf 100644 --- a/samplesheets/views.py +++ b/samplesheets/views.py @@ -794,43 +794,30 @@ def __init__(self): # Timeline helpers --------------------------------------------------------- - def add_tl_create(self, irods_request): + def add_tl_event(self, irods_request, action): """ - Create timeline event for iRODS data request creation. - - :param irods_request: IrodsDataRequest object - """ - if not self.timeline: - return - tl_event = self.timeline.add_event( - project=irods_request.project, - app_name=APP_NAME, - user=irods_request.user, - event_name=IRODS_REQUEST_EVENT_CREATE, - description='create iRODS data request {irods_request}', - status_type='OK', - ) - tl_event.add_object( - obj=irods_request, - label='irods_request', - name=irods_request.get_display_name(), - ) - - def add_tl_update(self, irods_request): - """ - Create timeline event for iRODS data request update. + Create timeline event for iRODS data request modification. :param irods_request: IrodsDataRequest object + :param action: 'create', 'update' or 'delete' (string) """ if not self.timeline: return + extra_data = {} + if action in ['create', 'update']: + extra_data = { + 'action': irods_request.action, + 'path': irods_request.path, + 'description': irods_request.description, + } tl_event = self.timeline.add_event( project=irods_request.project, app_name=APP_NAME, user=irods_request.user, - event_name=IRODS_REQUEST_EVENT_UPDATE, - description='update iRODS data request {irods_request}', + event_name='irods_request_{}'.format(action), + description=action + ' iRODS data request {irods_request}', status_type='OK', + extra_data=extra_data, ) tl_event.add_object( obj=irods_request, @@ -838,28 +825,6 @@ def add_tl_update(self, irods_request): name=irods_request.get_display_name(), ) - def add_tl_delete(self, irods_request): - """ - Create timeline event for iRODS data request deletion. 
- - :param irods_request: IrodsDataRequest object - """ - if not self.timeline: - return - tl_event = self.timeline.add_event( - project=irods_request.project, - app_name=APP_NAME, - user=irods_request.user, - event_name=IRODS_REQUEST_EVENT_DELETE, - description='delete iRODS data request {irods_request}', - status_type='OK', - ) - tl_event.add_object( - obj=irods_request, - label='irods_request', - name=str(irods_request), - ) - # App Alert Helpers -------------------------------------------------------- def add_alerts_create(self, project, app_alerts=None): @@ -2495,7 +2460,7 @@ def form_valid(self, form): obj.project = project obj.save() # Create timeline event - self.add_tl_create(obj) + self.add_tl_event(obj, 'create') # Add app alerts to owners/delegates self.add_alerts_create(project) messages.success( @@ -2543,7 +2508,7 @@ def get_form_kwargs(self): def form_valid(self, form): obj = form.save() - self.add_tl_update(obj) + self.add_tl_event(obj, 'update') messages.success( self.request, 'iRODS data request "{}" updated.'.format(obj.get_display_name()), @@ -2582,7 +2547,7 @@ def has_permission(self): def get_success_url(self): # Add timeline event - self.add_tl_delete(self.object) + self.add_tl_event(self.object, 'delete') # Handle project alerts self.handle_alerts_deactivate(self.object) messages.success(self.request, 'iRODS data request deleted.') diff --git a/samplesheets/views_ajax.py b/samplesheets/views_ajax.py index 066a553f..a2c79e40 100644 --- a/samplesheets/views_ajax.py +++ b/samplesheets/views_ajax.py @@ -1873,7 +1873,7 @@ def post(self, request, *args, **kwargs): ) # Create timeline event - self.add_tl_create(irods_request) + self.add_tl_event(irods_request, 'create') # Add app alerts to owners/delegates self.add_alerts_create(project) return Response( @@ -1911,16 +1911,12 @@ def post(self, request, *args, **kwargs): ) # Add timeline event - self.add_tl_delete(irods_request) + self.add_tl_event(irods_request, 'delete') # Handle project alerts self.handle_alerts_deactivate(irods_request) irods_request.delete() return Response( - { - 'detail': 'ok', - 'status': None, - 'user': None, - }, + {'detail': 'ok', 'status': None, 'user': None}, status=200, ) diff --git a/samplesheets/views_api.py b/samplesheets/views_api.py index 0f7537d2..5506515a 100644 --- a/samplesheets/views_api.py +++ b/samplesheets/views_api.py @@ -588,7 +588,7 @@ class IrodsDataRequestCreateAPIView( def perform_create(self, serializer): serializer.save() # Create timeline event - self.add_tl_create(serializer.instance) + self.add_tl_event(serializer.instance, 'create') # Add app alerts to owners/delegates self.add_alerts_create(serializer.instance.project) @@ -620,7 +620,7 @@ def perform_update(self, serializer): raise PermissionDenied serializer.save() # Add timeline event - self.add_tl_update(serializer.instance) + self.add_tl_event(serializer.instance, 'update') class IrodsDataRequestDestroyAPIView( @@ -647,7 +647,7 @@ def perform_destroy(self, instance): raise PermissionDenied instance.delete() # Add timeline event - self.add_tl_delete(instance) + self.add_tl_event(instance, 'delete') # Handle project alerts self.handle_alerts_deactivate(instance) From 5cf31c7a1e9b97da041e8b6562ab1bb86067e8e3 Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Wed, 28 Feb 2024 16:22:13 +0100 Subject: [PATCH 25/33] add cram support (#1908, #1914) --- CHANGELOG.rst | 3 + config/settings/base.py | 4 +- docs_manual/source/admin_settings.rst | 2 +- docs_manual/source/ext_tool_igv_opening.rst | 8 +- 
docs_manual/source/introduction.rst | 2 +- docs_manual/source/metadata_advanced.rst | 13 +- docs_manual/source/sodar_release_notes.rst | 12 ++ docs_manual/source/ui_project_update.rst | 6 +- samplesheets/plugins.py | 8 +- samplesheets/studyapps/cancer/plugins.py | 8 +- .../cancer/tests/test_plugins_taskflow.py | 199 ++++++++++++++++-- samplesheets/studyapps/cancer/utils.py | 13 +- samplesheets/studyapps/germline/plugins.py | 17 +- .../germline/tests/test_plugins_taskflow.py | 153 +++++++++++++- samplesheets/studyapps/germline/utils.py | 8 +- samplesheets/studyapps/tests/test_utils.py | 35 ++- samplesheets/studyapps/utils.py | 36 +++- samplesheets/tests/test_utils.py | 20 +- 18 files changed, 470 insertions(+), 77 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index f0fc32b4..08fc022c 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -19,6 +19,8 @@ Added - UI tests for project details card (#1902) - **Samplesheets** - ``IrodsDataRequest`` timeline event extra data (#1912) + - CRAM file support in study apps (#1908) + - ``check_igv_file_suffix()`` helper in ``studyapps.utils`` (#1908) - **Taskflowbackend** - Django settings in siteinfo (#1901) - ``BatchSetAccessTask`` in iRODS tasks (#1905) @@ -64,6 +66,7 @@ Fixed - ``IrodsDataRequest._validate_action()`` failing with ``delete`` action (#1858) - Protocol ref editable for new row if disabled in column config (#1875) - Sheet template creation failure with slash characters in title/ID fields (#1896) + - ``get_pedigree_file_path()`` used in cancer study app tests (#1914) - **Taskflowbackend** - Hardcoded iRODS path length in ``landing_zone_move`` (#1888) - Uncaught exceptions in ``SetAccessTask`` (#1906) diff --git a/config/settings/base.py b/config/settings/base.py index d9df4e87..813a8c01 100644 --- a/config/settings/base.py +++ b/config/settings/base.py @@ -807,11 +807,11 @@ def set_logging(level=None): # Remote sample sheet sync interval in minutes SHEETS_SYNC_INTERVAL = env.int('SHEETS_SYNC_INTERVAL', 5) -# BAM file name suffixes to omit from study shortcuts and IGV session generation +# BAM/CRAM file name suffixes to omit from study shortcuts and IGV sessions SHEETS_IGV_OMIT_BAM = env.list( 'SHEETS_IGV_OMIT_BAM', default=['dragen_evidence.bam'] ) -# VCF file name suffixes to omit from study shortcuts and IGV session generation +# VCF file name suffixes to omit from study shortcuts and IGV sessions SHEETS_IGV_OMIT_VCF = env.list( 'SHEETS_IGV_OMIT_VCF', default=['cnv.vcf.gz', 'ploidy.vcf.gz', 'sv.vcf.gz'] ) diff --git a/docs_manual/source/admin_settings.rst b/docs_manual/source/admin_settings.rst index ec32895a..016e7e60 100644 --- a/docs_manual/source/admin_settings.rst +++ b/docs_manual/source/admin_settings.rst @@ -193,7 +193,7 @@ Sample Sheets Settings ``SHEETS_SYNC_INTERVAL`` Interval for remote sheet synchronization in minutes (integer). ``SHEETS_IGV_OMIT_BAM`` - BAM file name suffixes to omit from study shortcuts and IGV session + BAM and CRAM file name suffixes to omit from study shortcuts and IGV session generation. ``SHEETS_IGV_OMIT_VCF`` VCF file name suffixes to omit from study shortcuts and IGV session diff --git a/docs_manual/source/ext_tool_igv_opening.rst b/docs_manual/source/ext_tool_igv_opening.rst index 8512e4b2..f5bd2b61 100644 --- a/docs_manual/source/ext_tool_igv_opening.rst +++ b/docs_manual/source/ext_tool_igv_opening.rst @@ -35,8 +35,8 @@ The IGV session can be opened replacing the currently open session in IGV. 
Another option is to merge the session into the currently open one with the button displaying a plus icon. -BAM and VCF files can be similarly loaded into the currently open IGV session by -clicking the corresponding button with a plus icon. +BAM/CRAM and VCF files can be similarly loaded into the currently open IGV +session by clicking the corresponding button with a plus icon. .. image:: _static/ext_tool_igv/IGV_Study_Shortcuts.png :width: 75% @@ -49,8 +49,8 @@ Obtaining File URLs You can also obtain URLs into the SODAR file serving sub system. The donor or pedigree names in the IGV links window depicted above link directly -to the BAM and VCF files in the file serving sub system. Right-click the link -and use the "copy link location" context menu entry to copy the File URL. +to the BAM/CRAM and VCF files in the file serving sub system. Right-click the +link and use the "copy link location" context menu entry to copy the File URL. You can then open this link in IGV using the "File" > "Load from URL" command. Paste the URL that you just copied into the "File URL" field of the window that diff --git a/docs_manual/source/introduction.rst b/docs_manual/source/introduction.rst index e03b93ae..2bc1cee6 100644 --- a/docs_manual/source/introduction.rst +++ b/docs_manual/source/introduction.rst @@ -81,7 +81,7 @@ Notable Features * Sample sheet generation from templates * Sample sheet editing and version control * Sample sheet export in ISA-Tab and Excel formats - * Automated iRODS shortcut generation for e.g. BAM/VCF files + * Automated iRODS shortcut generation for BAM/CRAM/VCF files * Automated Integrative Genomics Viewer (IGV) session file generation and merging * Track hub management for UCSC Genome Browser integration diff --git a/docs_manual/source/metadata_advanced.rst b/docs_manual/source/metadata_advanced.rst index a5c0cd5a..774b0701 100644 --- a/docs_manual/source/metadata_advanced.rst +++ b/docs_manual/source/metadata_advanced.rst @@ -26,11 +26,13 @@ SODAR currently supports the following study configurations: - **Cancer** * Indended for: Cancer studies * Configuration: ``bih_cancer`` - * Provides: **sample specific** shortcuts to the latest BAM and VCF files. + * Provides: **sample specific** shortcuts to the latest BAM/CRAM and VCF + files. - **Germline** * Indended for: Germline studies * Configuration: ``bih_germline`` - * Provides: **pedigree specific** shortcuts to the latest BAM and VCF files. + * Provides: **pedigree specific** shortcuts to the latest BAM/CRAM and VCF + files. If the configuration is not specified or is not known to SODAR, the shortcut column will not be visible. @@ -44,11 +46,12 @@ Placement of files in subcollections under the row-specific collection does not affect study shortcuts or IGV session inclusion. This also means files can be freely organized within desired subcollections. -If there is need to omit BAM or VCF files with certain file name patterns from -shortcuts and IGV sessions, file name suffixes can be defined in the +If there is need to omit BAM/CRAM or VCF files with certain file name patterns +from shortcuts and IGV sessions, file name suffixes can be defined in the :ref:`project update view `. Administrators can also define site-wide file suffixes to omit via :ref:`Django settings ` -under ``SHEETS_IGV_OMIT_BAM`` and ``SHEETS_IGV_OMIT_VCF``. +under ``SHEETS_IGV_OMIT_BAM`` (also affects CRAM files) and +``SHEETS_IGV_OMIT_VCF``. 
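To make the suffix-based omitting concrete, below is a minimal standalone
sketch of the idea. This is an illustration only and not SODAR's own helper
code: the ``include_in_igv`` function, the suffix constants and the example
file names are assumptions made for this example.

.. code-block:: python

    # Sketch of suffix-based file filtering for study shortcuts/IGV sessions.
    # CRAM files are grouped together with BAM files, as described above.
    BAM_SUFFIXES = ('.bam', '.cram')
    VCF_SUFFIXES = ('.vcf.gz',)

    def include_in_igv(file_name, file_type, omit_suffixes):
        """Return True if the file should be linked in shortcuts/IGV sessions."""
        fn = file_name.lower()
        suffixes = BAM_SUFFIXES if file_type == 'bam' else VCF_SUFFIXES
        if not fn.endswith(suffixes):
            return False  # Not a file of the requested type
        # Omit the file if its name ends with any configured omit suffix
        return not any(fn.endswith(s.lower()) for s in omit_suffixes)

    # With the default site-wide setting, Dragen evidence BAMs are omitted
    include_in_igv('P001_dragen_evidence.bam', 'bam', ['dragen_evidence.bam'])  # False
    include_in_igv('P001_test.cram', 'bam', ['dragen_evidence.bam'])  # True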
Assay iRODS Data Linking diff --git a/docs_manual/source/sodar_release_notes.rst b/docs_manual/source/sodar_release_notes.rst index 4af59d98..97409657 100644 --- a/docs_manual/source/sodar_release_notes.rst +++ b/docs_manual/source/sodar_release_notes.rst @@ -13,6 +13,7 @@ v0.14.2 (WIP) Release for minor updates, maintenance and bug fixes. +- Add CRAM file support for cancer/germline study links and IGV sessions - Add assay plugin shortcut collection creation in landing zones - Add iRODS data request details in timeline events - Add landing zone statistics in siteinfo @@ -25,6 +26,17 @@ Release for minor updates, maintenance and bug fixes. - Minor updates and bug fixes - Upgrade to SODAR Core v0.13.4 +Migration Guide +--------------- + +CRAM File Support + This release adds support for CRAM files. They are linked in studies and IGV + sessions similar to BAM files. If your project contains CRAM files uploaded + prior to this release, you will have to run :guilabel:`Update Sheet Cache` + in the Sample Sheets app to enable the files in study links and generated + IGV sessions. Alternatively, an administrator can run the ``synccache`` + management command to update all projects or a specific project. + v0.14.1 (2023-12-12) ==================== diff --git a/docs_manual/source/ui_project_update.rst b/docs_manual/source/ui_project_update.rst index 7161e970..5f387d3b 100644 --- a/docs_manual/source/ui_project_update.rst +++ b/docs_manual/source/ui_project_update.rst @@ -53,9 +53,9 @@ Token for Sheet Synchronization IGV session genome Genome used in generating IGV session files for the project. The name needs to be in a format accepted by IGV. Affects cancer and germline projects. -BAM files to omit from IGV sessions - Comma separated list of BAM file suffixes to omit from IGV sessions. - Overrides site-wide setting, affects cancer and germline projects. +BAM and CRAM files to omit from IGV sessions + Comma separated list of BAM and CRAM file suffixes to omit from IGV + sessions. Overrides site-wide setting, affects cancer and germline projects. VCF files to omit from IGV sessions Comma separated list of VCF file suffixes to omit from IGV sessions. Overrides site-wide setting, affects cancer and germline projects. diff --git a/samplesheets/plugins.py b/samplesheets/plugins.py index d4959049..b689fe5f 100644 --- a/samplesheets/plugins.py +++ b/samplesheets/plugins.py @@ -213,12 +213,12 @@ class ProjectAppPlugin( 'igv_omit_bam': { 'scope': SODAR_CONSTANTS['APP_SETTING_SCOPE_PROJECT'], 'type': 'STRING', - 'label': 'BAM files to omit from IGV sessions', + 'label': 'BAM and CRAM files to omit from IGV sessions', 'default': '', 'placeholder': ', '.join(settings.SHEETS_IGV_OMIT_BAM), - 'description': 'Comma separated list of BAM file suffixes to omit ' - 'from generated IGV sessions. Overrides site-wide setting, affects ' - 'cancer and germline projects.', + 'description': 'Comma separated list of BAM and CRAM file suffixes ' + 'to omit from generated IGV sessions. 
Overrides site-wide setting, ' + 'affects cancer and germline projects.', 'user_modifiable': True, }, 'igv_omit_vcf': { diff --git a/samplesheets/studyapps/cancer/plugins.py b/samplesheets/studyapps/cancer/plugins.py index 039acd83..05c77016 100644 --- a/samplesheets/studyapps/cancer/plugins.py +++ b/samplesheets/studyapps/cancer/plugins.py @@ -85,8 +85,8 @@ def get_shortcut_column(self, study, study_tables): 'files': { 'type': 'modal', 'icon': 'mdi:folder-open-outline', - 'title': 'View links to BAM, VCF and IGV session files ' - 'for the case', + 'title': 'View links to BAM/CRAM, VCF and IGV session ' + 'files for the case', }, }, 'data': [], @@ -172,7 +172,7 @@ def get_shortcut_links(self, study, study_tables, **kwargs): 'title': 'Case-Wise Links for {}'.format(case_id), 'data': { 'session': {'title': 'IGV Session File', 'files': []}, - 'bam': {'title': 'BAM Files', 'files': []}, + 'bam': {'title': 'BAM/CRAM Files', 'files': []}, 'vcf': {'title': 'VCF Files', 'files': []}, }, } @@ -190,7 +190,7 @@ def _add_lib_path(library_name, file_type): 'extra_links': [ { 'label': 'Add {} file to IGV'.format( - file_type.upper() + 'BAM/CRAM' if file_type == 'bam' else 'VCF' ), 'icon': 'mdi:plus-thick', 'url': get_igv_irods_url(path, merge=True), diff --git a/samplesheets/studyapps/cancer/tests/test_plugins_taskflow.py b/samplesheets/studyapps/cancer/tests/test_plugins_taskflow.py index ac4604a9..e85f30cc 100644 --- a/samplesheets/studyapps/cancer/tests/test_plugins_taskflow.py +++ b/samplesheets/studyapps/cancer/tests/test_plugins_taskflow.py @@ -3,6 +3,7 @@ import os from django.conf import settings +from django.test import override_settings from django.urls import reverse # Projectroles dependency @@ -18,7 +19,7 @@ from samplesheets.tests.test_models import SampleSheetModelMixin from samplesheets.tests.test_views_taskflow import SampleSheetTaskflowMixin from samplesheets.plugins import SampleSheetStudyPluginPoint -from samplesheets.studyapps.germline.utils import get_pedigree_file_path +from samplesheets.studyapps.cancer.utils import get_library_file_path from samplesheets.studyapps.utils import get_igv_session_url @@ -143,6 +144,28 @@ def test_get_shortcut_column_files(self): self.assertEqual(sc['data'][i]['igv']['enabled'], False) self.assertEqual(sc['data'][i]['files']['enabled'], False) + def test_get_shortcut_column_cram(self): + """Test get_shortcut_column() with CRAM file in iRODS""" + self.irods.collections.create(self.source_path) + cram_path = os.path.join( + self.source_path, '{}_test.cram'.format(SAMPLE_ID_NORMAL) + ) + vcf_path = os.path.join( + self.source_path, '{}_test.vcf.gz'.format(SAMPLE_ID_NORMAL) + ) + self.irods.data_objects.create(cram_path) + self.irods.data_objects.create(vcf_path) + + self.plugin.update_cache(self.cache_name, self.project) + study_tables = self.tb.build_study_tables(self.study) + sc = self.plugin.get_shortcut_column(self.study, study_tables) + + self.assertEqual(sc['data'][0]['igv']['enabled'], True) + self.assertEqual(sc['data'][0]['files']['enabled'], True) + for i in range(1, 4): + self.assertEqual(sc['data'][i]['igv']['enabled'], False) + self.assertEqual(sc['data'][i]['files']['enabled'], False) + def test_get_shortcut_links(self): """Test get_shortcut_links() with no cache item or files""" study_tables = self.tb.build_study_tables(self.study) @@ -197,21 +220,84 @@ def test_get_shortcut_links_files(self): self.assertEqual( sl['data']['bam']['files'][0]['url'], settings.IRODS_WEBDAV_URL - + get_pedigree_file_path('bam', self.source, study_tables), + + 
get_library_file_path( + self.assay, + LIBRARY_ID_NORMAL, + 'bam', + self.irods_backend, + self.irods, + ), + ) + self.assertEqual( + sl['data']['vcf']['files'][0]['url'], + settings.IRODS_WEBDAV_URL + + get_library_file_path( + self.assay, + LIBRARY_ID_NORMAL, + 'vcf', + self.irods_backend, + self.irods, + ), + ) + + def test_get_shortcut_links_cram(self): + """Test get_shortcut_links() with CRAM file in iRODS""" + self.irods.collections.create(self.source_path) + cram_path = os.path.join( + self.source_path, '{}_test.cram'.format(SAMPLE_ID_NORMAL) + ) + vcf_path = os.path.join( + self.source_path, '{}_test.vcf.gz'.format(SAMPLE_ID_NORMAL) + ) + self.irods.data_objects.create(cram_path) + self.irods.data_objects.create(vcf_path) + + self.plugin.update_cache(self.cache_name, self.project) + study_tables = self.tb.build_study_tables(self.study) + sl = self.plugin.get_shortcut_links( + self.study, study_tables, case=[SOURCE_ID_NORMAL] + ) + + self.assertEqual(len(sl['data']['session']['files']), 1) + self.assertEqual(len(sl['data']['bam']['files']), 1) + self.assertEqual(len(sl['data']['vcf']['files']), 1) + self.assertEqual( + sl['data']['session']['files'][0]['url'], + reverse( + 'samplesheets.studyapps.cancer:igv', + kwargs={'genericmaterial': self.source.sodar_uuid}, + ), + ) + self.assertEqual( + sl['data']['bam']['files'][0]['url'], + settings.IRODS_WEBDAV_URL + + get_library_file_path( + self.assay, + LIBRARY_ID_NORMAL, + 'bam', + self.irods_backend, + self.irods, + ), ) self.assertEqual( sl['data']['vcf']['files'][0]['url'], settings.IRODS_WEBDAV_URL - + get_pedigree_file_path('vcf', self.source, study_tables), + + get_library_file_path( + self.assay, + LIBRARY_ID_NORMAL, + 'vcf', + self.irods_backend, + self.irods, + ), ) def test_get_shortcut_links_multiple(self): """Test get_shortcut_links() with multiple BAM/VCF files""" self.irods.collections.create(self.source_path) - bam_path = os.path.join( - self.source_path, '{}_test_2022-11-06.bam'.format(SAMPLE_ID_NORMAL) + cram_path = os.path.join( + self.source_path, '{}_test_2022-11-06.cram'.format(SAMPLE_ID_NORMAL) ) - bam_path2 = os.path.join( + bam_path = os.path.join( self.source_path, '{}_test_2022-11-07.bam'.format(SAMPLE_ID_NORMAL) ) vcf_path = os.path.join( @@ -222,8 +308,8 @@ def test_get_shortcut_links_multiple(self): self.source_path, '{}_test.vcf_2022-11-07.vcf.gz'.format(SAMPLE_ID_NORMAL), ) + self.irods.data_objects.create(cram_path) self.irods.data_objects.create(bam_path) - self.irods.data_objects.create(bam_path2) self.irods.data_objects.create(vcf_path) self.irods.data_objects.create(vcf_path2) @@ -237,14 +323,12 @@ def test_get_shortcut_links_multiple(self): # Only the most recent bam and vcf should be returned self.assertEqual(len(sl['data']['bam']['files']), 1) self.assertEqual(len(sl['data']['vcf']['files']), 1) - self.assertTrue( - sl['data']['bam']['files'][0]['url'].endswith(bam_path2) - ) + self.assertTrue(sl['data']['bam']['files'][0]['url'].endswith(bam_path)) self.assertTrue( sl['data']['vcf']['files'][0]['url'].endswith(vcf_path2) ) - def test_get_shortcut_links_bam(self): + def test_get_shortcut_links_bam_only(self): """Test get_shortcut_links() with BAM file only""" self.irods.collections.create(self.source_path) bam_path = os.path.join( @@ -260,7 +344,23 @@ def test_get_shortcut_links_bam(self): self.assertEqual(len(sl['data']['bam']['files']), 1) self.assertEqual(len(sl['data']['vcf']['files']), 0) - def test_get_shortcut_links_vcf(self): + def test_get_shortcut_links_cram_only(self): + """Test 
get_shortcut_links() with CRAM file only""" + self.irods.collections.create(self.source_path) + cram_path = os.path.join( + self.source_path, '{}_test.cram'.format(SAMPLE_ID_NORMAL) + ) + self.irods.data_objects.create(cram_path) + self.plugin.update_cache(self.cache_name, self.project) + study_tables = self.tb.build_study_tables(self.study) + sl = self.plugin.get_shortcut_links( + self.study, study_tables, case=[SOURCE_ID_NORMAL] + ) + self.assertEqual(len(sl['data']['session']['files']), 1) + self.assertEqual(len(sl['data']['bam']['files']), 1) + self.assertEqual(len(sl['data']['vcf']['files']), 0) + + def test_get_shortcut_links_vcf_only(self): """Test get_shortcut_links() with VCF file only""" self.irods.collections.create(self.source_path) vcf_path = os.path.join( @@ -332,12 +432,24 @@ def test_get_shortcut_links_files_omit(self): self.assertEqual( sl['data']['bam']['files'][0]['url'], settings.IRODS_WEBDAV_URL - + get_pedigree_file_path('bam', self.source, study_tables), + + get_library_file_path( + self.assay, + LIBRARY_ID_NORMAL, + 'bam', + self.irods_backend, + self.irods, + ), ) self.assertEqual( sl['data']['vcf']['files'][0]['url'], settings.IRODS_WEBDAV_URL - + get_pedigree_file_path('vcf', self.source, study_tables), + + get_library_file_path( + self.assay, + LIBRARY_ID_NORMAL, + 'vcf', + self.irods_backend, + self.irods, + ), ) def test_get_shortcut_links_files_omit_only(self): @@ -360,6 +472,23 @@ def test_get_shortcut_links_files_omit_only(self): self.assertEqual(len(sl['data']['bam']['files']), 0) self.assertEqual(len(sl['data']['vcf']['files']), 0) + @override_settings(SHEETS_IGV_OMIT_BAM=['omit.cram']) + def test_get_shortcut_links_files_omit_only_cram(self): + """Test get_shortcut_links() with omittable CRAM file in iRODS""" + self.irods.collections.create(self.source_path) + cram_path_omit = os.path.join( + self.source_path, '{}_omit.cram'.format(SAMPLE_ID_NORMAL) + ) + self.irods.data_objects.create(cram_path_omit) + self.plugin.update_cache(self.cache_name, self.project) + study_tables = self.tb.build_study_tables(self.study) + sl = self.plugin.get_shortcut_links( + self.study, study_tables, case=[SOURCE_ID_NORMAL] + ) + self.assertEqual(len(sl['data']['session']['files']), 0) + self.assertEqual(len(sl['data']['bam']['files']), 0) + self.assertEqual(len(sl['data']['vcf']['files']), 0) + def test_get_shortcut_links_files_omit_override(self): """Test get_shortcut_links() with project-specific omit override""" app_settings.set( @@ -425,6 +554,27 @@ def test_update_cache_files(self): self.assertEqual(ci['bam'][CASE_IDS[i]], None) self.assertEqual(ci['vcf'][CASE_IDS[i]], None) + def test_update_cache_files_cram(self): + """Test update_cache() with CRAM file in iRODS""" + self.irods.collections.create(self.source_path) + cram_path = os.path.join( + self.source_path, '{}_test.cram'.format(SAMPLE_ID_NORMAL) + ) + vcf_path = os.path.join( + self.source_path, '{}_test.vcf.gz'.format(SAMPLE_ID_NORMAL) + ) + self.irods.data_objects.create(cram_path) + self.irods.data_objects.create(vcf_path) + self.plugin.update_cache(self.cache_name, self.project) + ci = self.cache_backend.get_cache_item( + APP_NAME, self.cache_name, self.project + ).data + self.assertEqual(ci['bam'][CASE_IDS[0]], cram_path) + self.assertEqual(ci['vcf'][CASE_IDS[0]], vcf_path) + for i in range(1, len(CASE_IDS) - 1): + self.assertEqual(ci['bam'][CASE_IDS[i]], None) + self.assertEqual(ci['vcf'][CASE_IDS[i]], None) + def test_update_cache_files_omit(self): """Test update_cache() with omittable files in iRODS""" 
self.irods.collections.create(self.source_path) @@ -458,7 +608,6 @@ def test_update_cache_files_omit(self): def test_update_cache_files_omit_only(self): """Test update_cache() with only omittable files in iRODS""" self.irods.collections.create(self.source_path) - # Create omittable files which come before real ones alphabetically bam_path_omit = os.path.join( self.source_path, '{}_dragen_evidence.bam'.format(SAMPLE_ID_NORMAL) ) @@ -475,6 +624,26 @@ def test_update_cache_files_omit_only(self): self.assertEqual(ci['bam'][CASE_IDS[i]], None) self.assertEqual(ci['vcf'][CASE_IDS[i]], None) + @override_settings(SHEETS_IGV_OMIT_BAM=['omit.cram']) + def test_update_cache_files_omit_only_cram(self): + """Test update_cache() with omittable CRAM file in iRODS""" + self.irods.collections.create(self.source_path) + cram_path_omit = os.path.join( + self.source_path, '{}_omit.cram'.format(SAMPLE_ID_NORMAL) + ) + vcf_path_omit = os.path.join( + self.source_path, '{}_cnv.vcf.gz'.format(SAMPLE_ID_NORMAL) + ) + self.irods.data_objects.create(cram_path_omit) + self.irods.data_objects.create(vcf_path_omit) + self.plugin.update_cache(self.cache_name, self.project) + ci = self.cache_backend.get_cache_item( + APP_NAME, self.cache_name, self.project + ).data + for i in range(0, len(CASE_IDS) - 1): + self.assertEqual(ci['bam'][CASE_IDS[i]], None) + self.assertEqual(ci['vcf'][CASE_IDS[i]], None) + def test_update_cache_files_omit_override(self): """Test update_cache() with project-specific omit override""" app_settings.set( diff --git a/samplesheets/studyapps/cancer/utils.py b/samplesheets/studyapps/cancer/utils.py index 048d2e96..a5c38c75 100644 --- a/samplesheets/studyapps/cancer/utils.py +++ b/samplesheets/studyapps/cancer/utils.py @@ -4,8 +4,8 @@ from samplesheets.studyapps.utils import ( get_igv_omit_override, + check_igv_file_suffix, check_igv_file_name, - FILE_TYPE_SUFFIXES, ) from samplesheets.utils import get_latest_file_path @@ -13,11 +13,11 @@ def get_library_file_path(assay, library_name, file_type, irods_backend, irods): """ Return iRODS path for the most recent file of type "bam" or "vcf" - linked to the library. + linked to the library. CRAM files are included in "bam" searches. 
:param assay: Assay object :param library_name: Library name (string) - :param file_type: String ("bam" or "vcf") + :param file_type: String ("bam" or "vcf", "bam" is also used for CRAM) :param irods_backend: IrodsAPI object :param irods: IRODSSession object :return: String @@ -29,9 +29,10 @@ def get_library_file_path(assay, library_name, file_type, irods_backend, irods): try: obj_list = irods_backend.get_objects(irods, query_path) for obj in obj_list: - if obj['name'].lower().endswith( - FILE_TYPE_SUFFIXES[file_type] - ) and check_igv_file_name(obj['name'], file_type, override): + obj_name = obj['name'].lower() + if check_igv_file_suffix( + obj_name, file_type + ) and check_igv_file_name(obj_name, file_type, override): file_paths.append(obj['path']) except Exception: pass diff --git a/samplesheets/studyapps/germline/plugins.py b/samplesheets/studyapps/germline/plugins.py index d2684f21..3e90f83a 100644 --- a/samplesheets/studyapps/germline/plugins.py +++ b/samplesheets/studyapps/germline/plugins.py @@ -15,6 +15,7 @@ from samplesheets.rendering import SampleSheetTableBuilder from samplesheets.studyapps.utils import ( get_igv_omit_override, + check_igv_file_suffix, check_igv_file_name, get_igv_session_url, get_igv_irods_url, @@ -86,8 +87,8 @@ def get_shortcut_column(self, study, study_tables): 'files': { 'type': 'modal', 'icon': 'mdi:folder-open-outline', - 'title': 'View links to pedigree BAM, VCF and IGV session ' - 'files', + 'title': 'View links to pedigree BAM/CRAM, VCF and IGV ' + 'session files', }, }, 'data': [], @@ -179,7 +180,7 @@ def get_shortcut_links(self, study, study_tables, **kwargs): 'title': 'Pedigree-Wise Links for {}'.format(query_id), 'data': { 'session': {'title': 'IGV Session File', 'files': []}, - 'bam': {'title': 'BAM Files', 'files': []}, + 'bam': {'title': 'BAM/CRAM Files', 'files': []}, 'vcf': {'title': 'VCF File', 'files': []}, }, } @@ -197,7 +198,7 @@ def get_shortcut_links(self, study, study_tables, **kwargs): project=study.get_project(), ) - # BAM links + # BAM/CRAM links for source in sources: # Use cached value if present if cache_item and source.name in cache_item.data['bam']: @@ -211,10 +212,10 @@ def get_shortcut_links(self, study, study_tables, **kwargs): { 'label': source.name, 'url': webdav_url + bam_path, - 'title': 'Download BAM file', + 'title': 'Download BAM/CRAM file', 'extra_links': [ { - 'label': 'Add BAM file to IGV', + 'label': 'Add BAM/CRAM file to IGV', 'icon': 'mdi:plus-thick', 'url': get_igv_irods_url(bam_path, merge=True), } @@ -342,7 +343,7 @@ def _update_study_cache(cls, study, user, cache_backend, irods_backend): source_name = row[0]['value'] if source_name not in bam_paths: bam_paths[source_name] = [] - # Add BAM objects + # Add BAM/CRAM objects path = assay_plugin.get_row_path( row, assay_table, assay, assay_path ) @@ -351,7 +352,7 @@ def _update_study_cache(cls, study, user, cache_backend, irods_backend): o['path'] for o in study_objs if o['path'].startswith(path + '/') - and o['name'].lower().endswith('bam') + and check_igv_file_suffix(o['name'], 'bam') and check_igv_file_name(o['name'], 'bam', bam_override) ] row_fam = row[fam_idx]['value'] diff --git a/samplesheets/studyapps/germline/tests/test_plugins_taskflow.py b/samplesheets/studyapps/germline/tests/test_plugins_taskflow.py index db4e8f89..7fc66a7a 100644 --- a/samplesheets/studyapps/germline/tests/test_plugins_taskflow.py +++ b/samplesheets/studyapps/germline/tests/test_plugins_taskflow.py @@ -3,6 +3,7 @@ import os from django.conf import settings +from django.test import 
override_settings from django.urls import reverse # Projectroles dependency @@ -140,6 +141,29 @@ def test_get_shortcut_column_files(self): self.assertEqual(sc['data'][i]['igv']['enabled'], False) self.assertEqual(sc['data'][i]['files']['enabled'], False) + def test_get_shortcut_column_cram_file(self): + """Test get_shortcut_column() with cache item and CRAM file in iRODS""" + self.irods.collections.create(self.source_path) + cram_path = os.path.join( + self.source_path, '{}_test.cram'.format(SAMPLE_ID) + ) + vcf_path = os.path.join( + self.source_path, '{}_test.vcf.gz'.format(FAMILY_ID) + ) + self.irods.data_objects.create(cram_path) + self.irods.data_objects.create(vcf_path) + + self.plugin.update_cache(self.cache_name, self.project) + study_tables = self.tb.build_study_tables(self.study) + sc = self.plugin.get_shortcut_column(self.study, study_tables) + + for i in range(0, 2): + self.assertEqual(sc['data'][i]['igv']['enabled'], True) + self.assertEqual(sc['data'][i]['files']['enabled'], True) + for i in range(3, 5): + self.assertEqual(sc['data'][i]['igv']['enabled'], False) + self.assertEqual(sc['data'][i]['files']['enabled'], False) + def test_get_shortcut_column_parent_vcf(self): """Test get_shortcut_column() with VCF file in a parent's collection""" self.irods.collections.create(self.parent_path) @@ -266,14 +290,53 @@ def test_get_shortcut_links_files(self): + get_pedigree_file_path('vcf', self.source, study_tables), ) + def test_get_shortcut_links_files_cram(self): + """Test get_shortcut_links() with CRAM file in iRODS""" + self.irods.collections.create(self.source_path) + cram_path = os.path.join( + self.source_path, '{}_test.cram'.format(SAMPLE_ID) + ) + vcf_path = os.path.join( + self.source_path, '{}_test.vcf.gz'.format(FAMILY_ID) + ) + self.irods.data_objects.create(cram_path) + self.irods.data_objects.create(vcf_path) + + self.plugin.update_cache(self.cache_name, self.project) + study_tables = self.tb.build_study_tables(self.study) + sl = self.plugin.get_shortcut_links( + self.study, study_tables, family=[FAMILY_ID] + ) + + self.assertEqual(len(sl['data']['session']['files']), 1) + self.assertEqual(len(sl['data']['bam']['files']), 1) + self.assertEqual(len(sl['data']['vcf']['files']), 1) + self.assertEqual( + sl['data']['session']['files'][0]['url'], + reverse( + 'samplesheets.studyapps.germline:igv', + kwargs={'genericmaterial': self.source.sodar_uuid}, + ), + ) + self.assertEqual( + sl['data']['bam']['files'][0]['url'], + settings.IRODS_WEBDAV_URL + + get_pedigree_file_path('bam', self.source, study_tables), + ) + self.assertEqual( + sl['data']['vcf']['files'][0]['url'], + settings.IRODS_WEBDAV_URL + + get_pedigree_file_path('vcf', self.source, study_tables), + ) + def test_get_shortcut_links_multiple(self): """Test get_shortcut_links() with multiple BAM/VCF files""" self.irods.collections.create(self.source_path) bam_path = os.path.join( self.source_path, '{}_test_2022-11-06.bam'.format(SAMPLE_ID) ) - bam_path2 = os.path.join( - self.source_path, '{}_test_2022-11-07.bam'.format(SAMPLE_ID) + cram_path = os.path.join( + self.source_path, '{}_test_2022-11-07.cram'.format(SAMPLE_ID) ) vcf_path = os.path.join( self.source_path, '{}_test.vcf_2022-11-06.vcf.gz'.format(FAMILY_ID) @@ -282,7 +345,7 @@ def test_get_shortcut_links_multiple(self): self.source_path, '{}_test.vcf_2022-11-07.vcf.gz'.format(FAMILY_ID) ) self.irods.data_objects.create(bam_path) - self.irods.data_objects.create(bam_path2) + self.irods.data_objects.create(cram_path) self.irods.data_objects.create(vcf_path) 
self.irods.data_objects.create(vcf_path2) @@ -293,17 +356,17 @@ def test_get_shortcut_links_multiple(self): ) self.assertEqual(len(sl['data']['session']['files']), 1) - # Only the most recent bam and vcf should be returned + # Only the most recent bam/cram and vcf should be returned self.assertEqual(len(sl['data']['bam']['files']), 1) self.assertEqual(len(sl['data']['vcf']['files']), 1) self.assertTrue( - sl['data']['bam']['files'][0]['url'].endswith(bam_path2) + sl['data']['bam']['files'][0]['url'].endswith(cram_path) ) self.assertTrue( sl['data']['vcf']['files'][0]['url'].endswith(vcf_path2) ) - def test_get_shortcut_links_bam(self): + def test_get_shortcut_links_bam_only(self): """Test get_shortcut_links() with BAM file only""" self.irods.collections.create(self.source_path) bam_path = os.path.join( @@ -319,7 +382,23 @@ def test_get_shortcut_links_bam(self): self.assertEqual(len(sl['data']['bam']['files']), 1) self.assertEqual(len(sl['data']['vcf']['files']), 0) - def test_get_shortcut_links_vcf(self): + def test_get_shortcut_links_cram_only(self): + """Test get_shortcut_links() with CRAM file only""" + self.irods.collections.create(self.source_path) + cram_path = os.path.join( + self.source_path, '{}_test.cram'.format(SAMPLE_ID) + ) + self.irods.data_objects.create(cram_path) + self.plugin.update_cache(self.cache_name, self.project) + study_tables = self.tb.build_study_tables(self.study) + sl = self.plugin.get_shortcut_links( + self.study, study_tables, family=[FAMILY_ID] + ) + self.assertEqual(len(sl['data']['session']['files']), 1) + self.assertEqual(len(sl['data']['bam']['files']), 1) + self.assertEqual(len(sl['data']['vcf']['files']), 0) + + def test_get_shortcut_links_vcf_only(self): """Test get_shortcut_links() with VCF file only""" self.irods.collections.create(self.source_path) vcf_path = os.path.join( @@ -419,6 +498,28 @@ def test_get_shortcut_links_files_omit_only(self): self.assertEqual(len(sl['data']['bam']['files']), 0) self.assertEqual(len(sl['data']['vcf']['files']), 0) + @override_settings(SHEETS_IGV_OMIT_BAM=['omit.cram']) + def test_get_shortcut_links_files_omit_cram(self): + """Test get_shortcut_links() with omittable CRAM file in iRODS""" + self.irods.collections.create(self.source_path) + cram_path_omit = os.path.join( + self.source_path, '{}_omit.cram'.format(SAMPLE_ID) + ) + vcf_path_omit = os.path.join( + self.source_path, '{}_cnv.vcf.gz'.format(FAMILY_ID) + ) + self.irods.data_objects.create(cram_path_omit) + self.irods.data_objects.create(vcf_path_omit) + + self.plugin.update_cache(self.cache_name, self.project) + study_tables = self.tb.build_study_tables(self.study) + sl = self.plugin.get_shortcut_links( + self.study, study_tables, family=[FAMILY_ID] + ) + self.assertEqual(len(sl['data']['session']['files']), 0) + self.assertEqual(len(sl['data']['bam']['files']), 0) + self.assertEqual(len(sl['data']['vcf']['files']), 0) + def test_get_shortcut_links_files_omit_override(self): """Test get_shortcut_links() with project-specific omit override""" app_settings.set( @@ -483,6 +584,25 @@ def test_update_cache_files(self): self.assertEqual(ci['vcf'][FAMILY_ID], vcf_path) self.assertIsNone(ci['vcf'][FAMILY_ID2]) + def test_update_cache_files_cram(self): + """Test update_cache() with CRAM file in iRODS""" + self.irods.collections.create(self.source_path) + cram_path = os.path.join( + self.source_path, '{}_test.cram'.format(SAMPLE_ID) + ) + vcf_path = os.path.join( + self.source_path, '{}_test.vcf.gz'.format(FAMILY_ID) + ) + self.irods.data_objects.create(cram_path) + 
self.irods.data_objects.create(vcf_path) + self.plugin.update_cache(self.cache_name, self.project) + ci = self.cache_backend.get_cache_item( + APP_NAME, self.cache_name, self.project + ).data + self.assertEqual(ci['bam'][self.source.name], cram_path) + self.assertEqual(ci['vcf'][FAMILY_ID], vcf_path) + self.assertIsNone(ci['vcf'][FAMILY_ID2]) + def test_update_cache_files_omit(self): """Test update_cache() with omittable files in iRODS""" self.irods.collections.create(self.source_path) @@ -530,6 +650,25 @@ def test_update_cache_files_omit_only(self): self.assertIsNone(ci['bam'][self.source.name]) self.assertIsNone(ci['vcf'][FAMILY_ID]) + @override_settings(SHEETS_IGV_OMIT_BAM=['omit.cram']) + def test_update_cache_files_omit_only_cram(self): + """Test update_cache() with omittable CRAM file in iRODS""" + self.irods.collections.create(self.source_path) + cram_path_omit = os.path.join( + self.source_path, '{}_omit.cram'.format(SAMPLE_ID) + ) + vcf_path_omit = os.path.join( + self.source_path, '{}_cnv.vcf.gz'.format(FAMILY_ID) + ) + self.irods.data_objects.create(cram_path_omit) + self.irods.data_objects.create(vcf_path_omit) + self.plugin.update_cache(self.cache_name, self.project) + ci = self.cache_backend.get_cache_item( + APP_NAME, self.cache_name, self.project + ).data + self.assertIsNone(ci['bam'][self.source.name]) + self.assertIsNone(ci['vcf'][FAMILY_ID]) + def test_update_cache_files_omit_override(self): """Test update_cache() with project-specific omit override""" app_settings.set( diff --git a/samplesheets/studyapps/germline/utils.py b/samplesheets/studyapps/germline/utils.py index 3038488e..1c526d3f 100644 --- a/samplesheets/studyapps/germline/utils.py +++ b/samplesheets/studyapps/germline/utils.py @@ -9,7 +9,7 @@ from samplesheets.studyapps.utils import ( get_igv_omit_override, check_igv_file_name, - FILE_TYPE_SUFFIXES, + check_igv_file_suffix, ) from samplesheets.utils import get_index_by_header @@ -22,7 +22,7 @@ def get_pedigree_file_path(file_type, source, study_tables): Return iRODS path for the most recent file of type "bam" or "vcf" linked to the source. 
- :param file_type: String ("bam" or "vcf") + :param file_type: String ("bam" or "vcf", "bam" is also used for CRAM) :param source: GenericMaterial of type SOURCE :param study_tables: Render study tables :return: String @@ -84,9 +84,7 @@ def _get_val_by_index(row, idx): for obj in obj_list: if ( obj['path'].startswith(query_path + '/') - and obj['name'] - .lower() - .endswith(FILE_TYPE_SUFFIXES[file_type]) + and check_igv_file_suffix(obj['name'], file_type) and check_igv_file_name(obj['name'], file_type, override) ): file_paths.append(obj['path']) diff --git a/samplesheets/studyapps/tests/test_utils.py b/samplesheets/studyapps/tests/test_utils.py index a485d999..3eacfaaf 100644 --- a/samplesheets/studyapps/tests/test_utils.py +++ b/samplesheets/studyapps/tests/test_utils.py @@ -14,7 +14,7 @@ from projectroles.tests.test_models import ProjectMixin, RoleAssignmentMixin from samplesheets.plugins import IGV_DEFAULT_GENOME -from samplesheets.studyapps.utils import get_igv_xml +from samplesheets.studyapps.utils import check_igv_file_suffix, get_igv_xml from samplesheets.tests.test_io import SampleSheetIOMixin @@ -41,6 +41,39 @@ def setUp(self): ) +class TestCheckIGVFileSuffix(TestCase): + """Tests for check_igv_file_suffix()""" + + def test_check_bam(self): + """Test checking with BAM type and valid name""" + self.assertTrue(check_igv_file_suffix('test.bam', 'bam')) + + def test_check_bam_invalid_name(self): + """Test checking with BAM type and invalid name""" + self.assertFalse(check_igv_file_suffix('test.vcf', 'bam')) + + def test_check_bam_cram(self): + """Test checking with BAM type and CRAM name""" + self.assertTrue(check_igv_file_suffix('test.cram', 'bam')) + + def test_check_bam_uppercase(self): + """Test checking with BAM type and uppercase name""" + self.assertTrue(check_igv_file_suffix('TEST.BAM', 'bam')) + + def test_check_vcf(self): + """Test checking with VCF type and valid name""" + self.assertTrue(check_igv_file_suffix('test.vcf.gz', 'vcf')) + + def test_check_vcf_no_gz(self): + """Test checking with VCF type and name without .gz suffix""" + self.assertFalse(check_igv_file_suffix('test.vcf', 'vcf')) + + def test_check_invalid_type(self): + """Test checking with invalid_type""" + with self.assertRaises(ValueError): + check_igv_file_suffix('test.bam', 'INVALID') + + class TestGetIGVXML(TestStudyAppUtilsBase): """Tests for get_igv_xml()""" diff --git a/samplesheets/studyapps/utils.py b/samplesheets/studyapps/utils.py index cb486841..335aa49a 100644 --- a/samplesheets/studyapps/utils.py +++ b/samplesheets/studyapps/utils.py @@ -17,6 +17,7 @@ # Constants IGV_URL_BASE = 'http://127.0.0.1:60151' FILE_TYPE_SUFFIXES = {'bam': '.bam', 'vcf': '.vcf.gz'} +FILE_TYPE_SUFFIX_CRAM = '.cram' # Special case grouped together with bam INVALID_TYPE_MSG = 'Invalid value for file_type' @@ -26,7 +27,7 @@ def get_igv_omit_override(project, file_type): method to avoid redundant database queries. :param project: Project object - :param file_type: String ("bam" or "vcf") + :param file_type: String ("bam" or "vcf", "bam" is also used for CRAM) :return: List or None """ ft = file_type.lower() @@ -40,19 +41,37 @@ def get_igv_omit_override(project, file_type): return None +def check_igv_file_suffix(file_name, file_type): + """ + Check if file name corresponds to the specified file type. 
+ + :param file_name: String + :param file_type: String ("bam" or "vcf", "bam" is also used for CRAM) + :raise: ValueError if file_type is incorrect + :return: Boolean (True if suffix matches the file type) + """ + if file_type.lower() not in ['bam', 'vcf']: + raise ValueError(INVALID_TYPE_MSG) + fn = file_name.lower() # Just in case suffix is in upper case + return ( + fn.endswith(FILE_TYPE_SUFFIXES[file_type]) + or file_type == 'bam' + and fn.endswith(FILE_TYPE_SUFFIX_CRAM) + ) + + def check_igv_file_name(file_name, file_type, override=None): """ Check if file is acceptable for IGV session inclusion. Returns False if suffix is found in env vars of omittable files. :param file_name: String - :param file_type: String ("bam" or "vcf") + :param file_type: String ("bam" or "vcf", "bam" is also used for CRAM) :param override: File suffixes to override site-wide setting (list or None) :raise: ValueError if file_type is incorrect :return: Boolean (True if name is OK) """ - ft = file_type.lower() - if ft not in ['bam', 'vcf']: + if file_type.lower() not in ['bam', 'vcf']: raise ValueError(INVALID_TYPE_MSG) fn = file_name.lower() if override: @@ -79,10 +98,7 @@ def get_igv_session_url(source, app_name, merge=False): '{}:igv'.format(app_name), kwargs={'genericmaterial': source.sodar_uuid} ) return '{}/load?merge={}&file={}{}.xml'.format( - IGV_URL_BASE, - str(merge).lower(), - file_prefix, - file_url, + IGV_URL_BASE, str(merge).lower(), file_prefix, file_url ) @@ -104,7 +120,7 @@ def get_igv_xml(project, bam_urls, vcf_urls, vcf_title, request, string=True): Build IGV session XML file. :param project: Project object - :param bam_urls: BAM file URLs (dict {name: url}) + :param bam_urls: BAM/CRAM file URLs (dict {name: url}) :param vcf_urls: VCF file URLs (dict {name: url}) :param vcf_title: VCF title to prefix to VCF title strings (string) :param request: Django request @@ -164,7 +180,7 @@ def get_igv_xml(project, bam_urls, vcf_urls, vcf_title, request, string=True): 'windowFunction': 'count', }, ) - # BAM panels + # BAM/CRAM panels for bam_name, bam_url in bam_urls.items(): # Generating unique panel name with hashlib xml_bam_panel = ET.SubElement( diff --git a/samplesheets/tests/test_utils.py b/samplesheets/tests/test_utils.py index c103fa17..86bb60fa 100644 --- a/samplesheets/tests/test_utils.py +++ b/samplesheets/tests/test_utils.py @@ -27,6 +27,7 @@ compare_inv_replace, get_webdav_url, get_ext_link_labels, + get_latest_file_path, ) from samplesheets.tests.test_io import ( SampleSheetIOMixin, @@ -52,6 +53,9 @@ ] IRODS_TICKET_STR = 'ooChaa1t' EXT_LINK_PATH_INVALID = '/tmp/NON_EXISTING_EXT_LINK_FILE.json' +BAM_PATH = '/sodarZone/coll_z/file_2023-02-28.bam' +BAM_PATH2 = '/sodarZone/coll_x/file_2023-03-01.bam' +CRAM_PATH = '/sodarZone/coll_y/file_2023-02-29.cram' class SamplesheetsUtilsTestBase( @@ -289,6 +293,20 @@ def test_get(self): @override_settings(SHEETS_EXTERNAL_LINK_PATH=EXT_LINK_PATH_INVALID) def test_get_default(self): - """Test retrievint default labels""" + """Test retrieving default labels""" labels = get_ext_link_labels() self.assertEqual(labels, DEFAULT_EXTERNAL_LINK_LABELS) + + +class TestGetLatestFilePath(SamplesheetsUtilsTestBase): + """Tests for get_latest_file_path()""" + + def test_get_bam(self): + """Test retrieval with two BAM paths""" + self.assertEqual(get_latest_file_path([BAM_PATH, BAM_PATH2]), BAM_PATH2) + + def test_get_mixed(self): + """Test retrieval with mixed BAM and CRAM paths""" + self.assertEqual( + get_latest_file_path([BAM_PATH, BAM_PATH2, CRAM_PATH]), BAM_PATH2 
+ ) From e1360a7badc863c30f80e282aee5233f63c35f1e Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Thu, 29 Feb 2024 12:00:15 +0100 Subject: [PATCH 26/33] fix finished zone status updating (#1909) --- CHANGELOG.rst | 2 ++ landingzones/tasks_taskflow.py | 12 ++++--- taskflowbackend/api.py | 36 +++++++++++++++----- taskflowbackend/tests/test_flows.py | 51 ++++++++++++++++++++++++++++- 4 files changed, 87 insertions(+), 14 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 08fc022c..a78abdbc 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -59,6 +59,7 @@ Fixed - **Landingzones** - Stats badge displayed to superusers for ``DELETED`` zones (#1866) - Zone status updating not working in project details card (#1902) + - Modifying finished lock status allowed in ``SetLandingZoneStatusTask`` (#1909) - **Samplesheets** - Invalid WebDAV URLs generated in ``IrodsDataRequestListView`` (#1860) - Superuser not allowed to edit iRODS request from other users in UI (#1863) @@ -71,6 +72,7 @@ Fixed - Hardcoded iRODS path length in ``landing_zone_move`` (#1888) - Uncaught exceptions in ``SetAccessTask`` (#1906) - Crash in ``landing_zone_create`` with large amount of collections (#1905) + - Finished landing zone status modified by lock exception (#1909) Removed ------- diff --git a/landingzones/tasks_taskflow.py b/landingzones/tasks_taskflow.py index 71911796..875fee5b 100644 --- a/landingzones/tasks_taskflow.py +++ b/landingzones/tasks_taskflow.py @@ -19,6 +19,7 @@ from landingzones.constants import ( STATUS_BUSY, + STATUS_FINISHED, ZONE_STATUS_FAILED, ZONE_STATUS_NOT_CREATED, ZONE_STATUS_MOVED, @@ -381,10 +382,13 @@ def execute( *args, **kwargs ): - self.set_status( - landing_zone, flow_name, status, status_info, extra_data - ) - self.data_modified = True + # Prevent setting status if already finished (see #1909) + landing_zone.refresh_from_db() + if landing_zone.status not in STATUS_FINISHED: + self.set_status( + landing_zone, flow_name, status, status_info, extra_data + ) + self.data_modified = True super().execute(*args, **kwargs) def revert( diff --git a/taskflowbackend/api.py b/taskflowbackend/api.py index 708ff52a..60a47d36 100644 --- a/taskflowbackend/api.py +++ b/taskflowbackend/api.py @@ -8,6 +8,7 @@ ZONE_STATUS_NOT_CREATED, ZONE_STATUS_CREATING, ZONE_STATUS_FAILED, + STATUS_FINISHED, ) from landingzones.models import LandingZone @@ -62,6 +63,29 @@ def _raise_flow_exception(cls, ex_msg, tl_event=None, zone=None): # TODO: Create app alert for failure if async (see #1499) raise cls.FlowSubmitException(ex_msg) + @classmethod + def _raise_lock_exception(cls, ex_msg, tl_event=None, zone=None): + """ + Raise exception specifically for project lock errors. Updates the status + of the landing zone only if zone has not yet been finished by a + concurrent taskflow. 
+ + :param ex_msg: Exception message (string) + :param tl_event: Timeline event or None + :param zone: LandingZone object or None + :raise: FlowSubmitException + """ + lock_zone = None + if zone: + zone.refresh_from_db() + if zone.status not in STATUS_FINISHED: + lock_zone = zone # Exclude zone if already finished (see #1909) + cls._raise_flow_exception( + LOCK_FAIL_MSG + ': ' + str(ex_msg), + tl_event, + lock_zone, + ) + @classmethod def get_flow( cls, @@ -137,10 +161,8 @@ def run_flow( # Acquire lock coordinator = lock_api.get_coordinator() if not coordinator: - cls._raise_flow_exception( - LOCK_FAIL_MSG + ': Failed to retrieve lock coordinator', - tl_event, - zone, + cls._raise_lock_exception( + 'Failed to retrieve lock coordinator', tl_event, zone ) else: lock_id = str(project.sodar_uuid) @@ -148,11 +170,7 @@ def run_flow( try: lock_api.acquire(lock) except Exception as ex: - cls._raise_flow_exception( - LOCK_FAIL_MSG + ': {}'.format(ex), - tl_event, - zone, - ) + cls._raise_lock_exception(str(ex), tl_event, zone) else: logger.info('Lock not required (flow.require_lock=False)') diff --git a/taskflowbackend/tests/test_flows.py b/taskflowbackend/tests/test_flows.py index 5d8454f4..130f3a84 100644 --- a/taskflowbackend/tests/test_flows.py +++ b/taskflowbackend/tests/test_flows.py @@ -81,6 +81,7 @@ UPDATED_DESC = 'updated description' SCRIPT_USER_NAME = 'script_user' IRODS_ROOT_PATH = 'sodar/root' +INVALID_REDIS_URL = 'redis://127.0.0.1:6666/0' class TaskflowbackendFlowTestBase(TaskflowViewTestBase): @@ -454,7 +455,7 @@ def setUp(self): self.make_irods_colls(self.investigation) def test_delete(self): - """Test landing_zone_delete with an empty landing zone""" + """Test landing_zone_delete with empty landing zone""" zone = self.make_landing_zone( title=ZONE_TITLE, project=self.project, @@ -561,6 +562,54 @@ def test_delete_files_restrict(self): self.assertEqual(self.irods.data_objects.exists(obj_path), False) self.assertEqual(self.irods.collections.exists(zone_path), False) + def test_delete_finished(self): + """Test landing_zone_delete with finished zone""" + # NOTE: This may happen with concurrent requests. 
See #1909, #1910 + zone = self.make_landing_zone( + title=ZONE_TITLE, + project=self.project, + user=self.user, + assay=self.assay, + description=ZONE_DESC, + status=ZONE_STATUS_DELETED, + ) + # Do not create in taskflow + self.assertEqual(zone.status, ZONE_STATUS_DELETED) + flow_data = {'zone_uuid': str(zone.sodar_uuid)} + flow = self.taskflow.get_flow( + irods_backend=self.irods_backend, + project=self.project, + flow_name='landing_zone_delete', + flow_data=flow_data, + ) + self._build_and_run(flow) + zone.refresh_from_db() + # This should still be deleted, not failed + self.assertEqual(zone.status, ZONE_STATUS_DELETED) + + @override_settings(REDIS_URL=INVALID_REDIS_URL) + def test_delete_finished_lock_failure(self): + """Test landing_zone_delete with finished zone and lock failure""" + zone = self.make_landing_zone( + title=ZONE_TITLE, + project=self.project, + user=self.user, + assay=self.assay, + description=ZONE_DESC, + status=ZONE_STATUS_DELETED, + ) + self.assertEqual(zone.status, ZONE_STATUS_DELETED) + flow_data = {'zone_uuid': str(zone.sodar_uuid)} + flow = self.taskflow.get_flow( + irods_backend=self.irods_backend, + project=self.project, + flow_name='landing_zone_delete', + flow_data=flow_data, + ) + self._build_and_run(flow) + zone.refresh_from_db() + self.assertEqual(zone.status, ZONE_STATUS_DELETED) + class TestLandingZoneMove( LandingZoneMixin, From e5a66d38df8278ddae91623b39068c043b56aeab Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Wed, 6 Mar 2024 15:51:06 +0100 Subject: [PATCH 27/33] add igv omit setting path and glob pattern support (#1923, #1924, #1925) --- CHANGELOG.rst | 5 + config/settings/base.py | 9 +- config/settings/test.py | 4 +- docs_manual/source/sodar_release_notes.rst | 1 + docs_manual/source/ui_project_update.rst | 22 ++- samplesheets/plugins.py | 24 ++-- samplesheets/studyapps/cancer/plugins.py | 5 +- .../cancer/tests/test_plugins_taskflow.py | 88 +++++++++--- samplesheets/studyapps/cancer/utils.py | 11 +- samplesheets/studyapps/germline/plugins.py | 24 ++-- .../germline/tests/test_plugins_taskflow.py | 89 +++++++++--- samplesheets/studyapps/germline/utils.py | 8 +- samplesheets/studyapps/tests/test_utils.py | 136 +++++++++++++++++- samplesheets/studyapps/utils.py | 44 +++--- 14 files changed, 361 insertions(+), 109 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index a78abdbc..5cf243a5 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -21,6 +21,8 @@ Added - ``IrodsDataRequest`` timeline event extra data (#1912) - CRAM file support in study apps (#1908) - ``check_igv_file_suffix()`` helper in ``studyapps.utils`` (#1908) + - Path checking for IGV omit settings (#1923) + - Glob pattern support for IGV omit settings (#1923) - **Taskflowbackend** - Django settings in siteinfo (#1901) - ``BatchSetAccessTask`` in iRODS tasks (#1905) @@ -43,6 +45,8 @@ Changed - Improve Django messages for ``IrodsDataRequest`` exceptions (#1858) - Change ``IrodsDataRequest`` description if created in Ajax view (#1862) - Refactor ``IrodsDataRequestModifyMixin`` timeline helpers (#1913) + - Rename ``get_igv_omit_override()`` to ``get_igv_omit_list()`` (#1924) + - Rename ``check_igv_file_name()`` to ``check_igv_file_path()`` (#1923) - **Taskflowbackend** - Optimize ``landing_zone_move`` iRODS path retrieval (#1882) - Set zone status on uncaught errors in ``run_flow()`` (#1458) @@ -68,6 +72,7 @@ Fixed - Protocol ref editable for new row if disabled in column config (#1875) - Sheet template creation failure with slash characters in title/ID fields (#1896) - 
``get_pedigree_file_path()`` used in cancer study app tests (#1914) + - IGV omit settings not correctly set on project creation (#1925) - **Taskflowbackend** - Hardcoded iRODS path length in ``landing_zone_move`` (#1888) - Uncaught exceptions in ``SetAccessTask`` (#1906) diff --git a/config/settings/base.py b/config/settings/base.py index 813a8c01..091a5791 100644 --- a/config/settings/base.py +++ b/config/settings/base.py @@ -807,13 +807,14 @@ def set_logging(level=None): # Remote sample sheet sync interval in minutes SHEETS_SYNC_INTERVAL = env.int('SHEETS_SYNC_INTERVAL', 5) -# BAM/CRAM file name suffixes to omit from study shortcuts and IGV sessions +# BAM/CRAM file path glob patterns to omit from study shortcuts and IGV sessions SHEETS_IGV_OMIT_BAM = env.list( - 'SHEETS_IGV_OMIT_BAM', default=['dragen_evidence.bam'] + 'SHEETS_IGV_OMIT_BAM', default=['*dragen_evidence.bam'] ) -# VCF file name suffixes to omit from study shortcuts and IGV sessions +# VCF file path glob patterns to omit from study shortcuts and IGV sessions SHEETS_IGV_OMIT_VCF = env.list( - 'SHEETS_IGV_OMIT_VCF', default=['cnv.vcf.gz', 'ploidy.vcf.gz', 'sv.vcf.gz'] + 'SHEETS_IGV_OMIT_VCF', + default=['*cnv.vcf.gz', '*ploidy.vcf.gz', '*sv.vcf.gz'], ) # Landingzones app settings diff --git a/config/settings/test.py b/config/settings/test.py index 76566c48..93410ead 100644 --- a/config/settings/test.py +++ b/config/settings/test.py @@ -107,8 +107,8 @@ SHEETS_EXTERNAL_LINK_PATH = os.path.join( ROOT_DIR, 'samplesheets/tests/config/ext_links.json' ) -SHEETS_IGV_OMIT_BAM = ['dragen_evidence.bam'] -SHEETS_IGV_OMIT_VCF = ['cnv.vcf.gz', 'ploidy.vcf.gz', 'sv.vcf.gz'] +SHEETS_IGV_OMIT_BAM = ['*dragen_evidence.bam'] +SHEETS_IGV_OMIT_VCF = ['*cnv.vcf.gz', '*ploidy.vcf.gz', '*sv.vcf.gz'] # Landingzones app settings LANDINGZONES_TRIGGER_ENABLE = True diff --git a/docs_manual/source/sodar_release_notes.rst b/docs_manual/source/sodar_release_notes.rst index 97409657..f5ccc1d7 100644 --- a/docs_manual/source/sodar_release_notes.rst +++ b/docs_manual/source/sodar_release_notes.rst @@ -14,6 +14,7 @@ v0.14.2 (WIP) Release for minor updates, maintenance and bug fixes. - Add CRAM file support for cancer/germline study links and IGV sessions +- Add path glob pattern support for IGV session BAM/VCF omit settings - Add assay plugin shortcut collection creation in landing zones - Add iRODS data request details in timeline events - Add landing zone statistics in siteinfo diff --git a/docs_manual/source/ui_project_update.rst b/docs_manual/source/ui_project_update.rst index 5f387d3b..24f0ab5b 100644 --- a/docs_manual/source/ui_project_update.rst +++ b/docs_manual/source/ui_project_update.rst @@ -53,12 +53,22 @@ Token for Sheet Synchronization IGV session genome Genome used in generating IGV session files for the project. The name needs to be in a format accepted by IGV. Affects cancer and germline projects. -BAM and CRAM files to omit from IGV sessions - Comma separated list of BAM and CRAM file suffixes to omit from IGV - sessions. Overrides site-wide setting, affects cancer and germline projects. -VCF files to omit from IGV sessions - Comma separated list of VCF file suffixes to omit from IGV sessions. - Overrides site-wide setting, affects cancer and germline projects. +BAM and CRAM paths to omit from IGV sessions + Comma-separated list of iRODS path glob patterns for omitting BAM and CRAM + files from IGV sessions and study shortcuts. Overrides site-wide setting, + affects cancer and germline projects. 
You can input paths to subcollections + under project sample data as well as file names. Using wildcards like ``*`` + and ``?`` is permitted. Providing full iRODS paths is not necessary: each + pattern is assumed to start with a ``*`` wildcard. An empty setting means no + files or collections are excluded. You need to run + :guilabel:`Update Sheet Cache` in the Sample Sheet application for changes + to take effect. +VCF paths to omit from IGV sessions + Comma-separated list of iRODS path glob patterns for omitting VCF files from + IGV sessions and study shortcuts. Overrides site-wide setting, affects + cancer and germline projects. Behaves similarly to the related BAM/CRAM + setting. You need to run :guilabel:`Update Sheet Cache` in the Sample Sheet + application for changes to take effect. IP Restrict Restrict project access to specific IP addresses if this is set. IP Allow List diff --git a/samplesheets/plugins.py b/samplesheets/plugins.py index b689fe5f..f93aead4 100644 --- a/samplesheets/plugins.py +++ b/samplesheets/plugins.py @@ -213,23 +213,23 @@ class ProjectAppPlugin( 'igv_omit_bam': { 'scope': SODAR_CONSTANTS['APP_SETTING_SCOPE_PROJECT'], 'type': 'STRING', - 'label': 'BAM and CRAM files to omit from IGV sessions', - 'default': '', - 'placeholder': ', '.join(settings.SHEETS_IGV_OMIT_BAM), - 'description': 'Comma separated list of BAM and CRAM file suffixes ' - 'to omit from generated IGV sessions. Overrides site-wide setting, ' - 'affects cancer and germline projects.', + 'label': 'BAM and CRAM paths to omit from IGV sessions', + 'default': ', '.join(settings.SHEETS_IGV_OMIT_BAM), + 'description': 'Comma-separated list of iRODS path glob patterns ' + 'for omitting BAM and CRAM files from IGV sessions and study ' + 'shortcuts. Overrides site-wide setting, affects cancer and ' + 'germline projects. Update sheet cache after updating this value.', 'user_modifiable': True, }, 'igv_omit_vcf': { 'scope': SODAR_CONSTANTS['APP_SETTING_SCOPE_PROJECT'], 'type': 'STRING', - 'label': 'VCF files to omit from IGV sessions', - 'default': '', - 'placeholder': ', '.join(settings.SHEETS_IGV_OMIT_VCF), - 'description': 'Comma separated list of VCF file suffixes to omit ' - 'from generated IGV sessions. Overrides site-wide setting, affects ' - 'cancer and germline projects.', + 'label': 'VCF paths to omit from IGV sessions', + 'default': ', '.join(settings.SHEETS_IGV_OMIT_VCF), + 'description': 'Comma-separated list of iRODS path glob patterns ' + 'for omitting VCF files from IGV sessions and study shortcuts. ' + 'Overrides site-wide setting, affects cancer and germline ' + 'projects. 
Update sheet cache after updating this value.', 'user_modifiable': True, }, } diff --git a/samplesheets/studyapps/cancer/plugins.py b/samplesheets/studyapps/cancer/plugins.py index 05c77016..169f6f41 100644 --- a/samplesheets/studyapps/cancer/plugins.py +++ b/samplesheets/studyapps/cancer/plugins.py @@ -18,10 +18,7 @@ get_latest_file_path, ) from samplesheets.studyapps.utils import get_igv_session_url, get_igv_irods_url -from samplesheets.utils import ( - get_isa_field_name, - get_last_material_index, -) +from samplesheets.utils import get_isa_field_name, get_last_material_index logger = logging.getLogger(__name__) diff --git a/samplesheets/studyapps/cancer/tests/test_plugins_taskflow.py b/samplesheets/studyapps/cancer/tests/test_plugins_taskflow.py index e85f30cc..8634c0cf 100644 --- a/samplesheets/studyapps/cancer/tests/test_plugins_taskflow.py +++ b/samplesheets/studyapps/cancer/tests/test_plugins_taskflow.py @@ -3,7 +3,6 @@ import os from django.conf import settings -from django.test import override_settings from django.urls import reverse # Projectroles dependency @@ -392,7 +391,7 @@ def test_get_shortcut_links_invalid(self): self.assertEqual(len(sl['data']['bam']['files']), 0) self.assertEqual(len(sl['data']['vcf']['files']), 0) - def test_get_shortcut_links_files_omit(self): + def test_get_shortcut_links_omit(self): """Test get_shortcut_links() with omittable files in iRODS""" self.irods.collections.create(self.source_path) # Create omittable files which come before real ones alphabetically @@ -452,7 +451,7 @@ def test_get_shortcut_links_files_omit(self): ), ) - def test_get_shortcut_links_files_omit_only(self): + def test_get_shortcut_links_omit_only(self): """Test get_shortcut_links() with only omittable files in iRODS""" self.irods.collections.create(self.source_path) bam_path_omit = os.path.join( @@ -472,9 +471,37 @@ def test_get_shortcut_links_files_omit_only(self): self.assertEqual(len(sl['data']['bam']['files']), 0) self.assertEqual(len(sl['data']['vcf']['files']), 0) - @override_settings(SHEETS_IGV_OMIT_BAM=['omit.cram']) - def test_get_shortcut_links_files_omit_only_cram(self): + def test_get_shortcut_links_omit_only_empty_overrides(self): + """Test get_shortcut_links() with empty project overrides""" + app_settings.set( + 'samplesheets', 'igv_omit_bam', '', project=self.project + ) + app_settings.set( + 'samplesheets', 'igv_omit_vcf', '', project=self.project + ) + self.irods.collections.create(self.source_path) + bam_path_omit = os.path.join( + self.source_path, '{}_dragen_evidence.bam'.format(SAMPLE_ID_NORMAL) + ) + vcf_path_omit = os.path.join( + self.source_path, '{}_cnv.vcf.gz'.format(SAMPLE_ID_NORMAL) + ) + self.irods.data_objects.create(bam_path_omit) + self.irods.data_objects.create(vcf_path_omit) + self.plugin.update_cache(self.cache_name, self.project) + study_tables = self.tb.build_study_tables(self.study) + sl = self.plugin.get_shortcut_links( + self.study, study_tables, case=[SOURCE_ID_NORMAL] + ) + self.assertEqual(len(sl['data']['session']['files']), 1) + self.assertEqual(len(sl['data']['bam']['files']), 1) + self.assertEqual(len(sl['data']['vcf']['files']), 1) + + def test_get_shortcut_links_omit_only_cram(self): """Test get_shortcut_links() with omittable CRAM file in iRODS""" + app_settings.set( + 'samplesheets', 'igv_omit_bam', '*omit.cram', project=self.project + ) self.irods.collections.create(self.source_path) cram_path_omit = os.path.join( self.source_path, '{}_omit.cram'.format(SAMPLE_ID_NORMAL) @@ -489,18 +516,18 @@ def 
test_get_shortcut_links_files_omit_only_cram(self): self.assertEqual(len(sl['data']['bam']['files']), 0) self.assertEqual(len(sl['data']['vcf']['files']), 0) - def test_get_shortcut_links_files_omit_override(self): + def test_get_shortcut_links_omit_override(self): """Test get_shortcut_links() with project-specific omit override""" app_settings.set( 'samplesheets', 'igv_omit_bam', - 'test.bam, xxx.bam', + '*test.bam,*xxx.bam', project=self.project, ) app_settings.set( 'samplesheets', 'igv_omit_vcf', - 'test.vcf.gz, yyy.vcf.gz', + '*test.vcf.gz,*yyy.vcf.gz', project=self.project, ) self.irods.collections.create(self.source_path) @@ -554,7 +581,7 @@ def test_update_cache_files(self): self.assertEqual(ci['bam'][CASE_IDS[i]], None) self.assertEqual(ci['vcf'][CASE_IDS[i]], None) - def test_update_cache_files_cram(self): + def test_update_cache_cram(self): """Test update_cache() with CRAM file in iRODS""" self.irods.collections.create(self.source_path) cram_path = os.path.join( @@ -575,7 +602,7 @@ def test_update_cache_files_cram(self): self.assertEqual(ci['bam'][CASE_IDS[i]], None) self.assertEqual(ci['vcf'][CASE_IDS[i]], None) - def test_update_cache_files_omit(self): + def test_update_cache_omit(self): """Test update_cache() with omittable files in iRODS""" self.irods.collections.create(self.source_path) # Create omittable files which come before real ones alphabetically @@ -605,7 +632,7 @@ def test_update_cache_files_omit(self): self.assertEqual(ci['bam'][CASE_IDS[i]], None) self.assertEqual(ci['vcf'][CASE_IDS[i]], None) - def test_update_cache_files_omit_only(self): + def test_update_cache_omit_only(self): """Test update_cache() with only omittable files in iRODS""" self.irods.collections.create(self.source_path) bam_path_omit = os.path.join( @@ -624,9 +651,38 @@ def test_update_cache_files_omit_only(self): self.assertEqual(ci['bam'][CASE_IDS[i]], None) self.assertEqual(ci['vcf'][CASE_IDS[i]], None) - @override_settings(SHEETS_IGV_OMIT_BAM=['omit.cram']) - def test_update_cache_files_omit_only_cram(self): + def test_update_cache_omit_only_empty_overrides(self): + """Test update_cache() with empty project overrides""" + app_settings.set( + 'samplesheets', 'igv_omit_bam', '', project=self.project + ) + app_settings.set( + 'samplesheets', 'igv_omit_vcf', '', project=self.project + ) + self.irods.collections.create(self.source_path) + bam_path_omit = os.path.join( + self.source_path, '{}_dragen_evidence.bam'.format(SAMPLE_ID_NORMAL) + ) + vcf_path_omit = os.path.join( + self.source_path, '{}_cnv.vcf.gz'.format(SAMPLE_ID_NORMAL) + ) + self.irods.data_objects.create(bam_path_omit) + self.irods.data_objects.create(vcf_path_omit) + self.plugin.update_cache(self.cache_name, self.project) + ci = self.cache_backend.get_cache_item( + APP_NAME, self.cache_name, self.project + ).data + self.assertEqual(ci['bam'][CASE_IDS[0]], bam_path_omit) + self.assertEqual(ci['vcf'][CASE_IDS[0]], vcf_path_omit) + for i in range(1, len(CASE_IDS) - 1): + self.assertEqual(ci['bam'][CASE_IDS[i]], None) + self.assertEqual(ci['vcf'][CASE_IDS[i]], None) + + def test_update_cache_omit_only_cram(self): """Test update_cache() with omittable CRAM file in iRODS""" + app_settings.set( + 'samplesheets', 'igv_omit_bam', '*omit.cram', project=self.project + ) self.irods.collections.create(self.source_path) cram_path_omit = os.path.join( self.source_path, '{}_omit.cram'.format(SAMPLE_ID_NORMAL) @@ -644,18 +700,18 @@ def test_update_cache_files_omit_only_cram(self): self.assertEqual(ci['bam'][CASE_IDS[i]], None) 
self.assertEqual(ci['vcf'][CASE_IDS[i]], None) - def test_update_cache_files_omit_override(self): + def test_update_cache_omit_override(self): """Test update_cache() with project-specific omit override""" app_settings.set( 'samplesheets', 'igv_omit_bam', - 'test.bam, xxx.bam', + '*test.bam,*xxx.bam', project=self.project, ) app_settings.set( 'samplesheets', 'igv_omit_vcf', - 'test.vcf.gz, yyy.vcf.gz', + '*test.vcf.gz,*yyy.vcf.gz', project=self.project, ) self.irods.collections.create(self.source_path) diff --git a/samplesheets/studyapps/cancer/utils.py b/samplesheets/studyapps/cancer/utils.py index a5c38c75..89c9c536 100644 --- a/samplesheets/studyapps/cancer/utils.py +++ b/samplesheets/studyapps/cancer/utils.py @@ -3,9 +3,9 @@ import os from samplesheets.studyapps.utils import ( - get_igv_omit_override, + get_igv_omit_list, check_igv_file_suffix, - check_igv_file_name, + check_igv_file_path, ) from samplesheets.utils import get_latest_file_path @@ -25,14 +25,13 @@ def get_library_file_path(assay, library_name, file_type, irods_backend, irods): assay_path = irods_backend.get_path(assay) query_path = os.path.join(assay_path, library_name) file_paths = [] - override = get_igv_omit_override(assay.get_project(), file_type) + omit_list = get_igv_omit_list(assay.get_project(), file_type) try: obj_list = irods_backend.get_objects(irods, query_path) for obj in obj_list: - obj_name = obj['name'].lower() if check_igv_file_suffix( - obj_name, file_type - ) and check_igv_file_name(obj_name, file_type, override): + obj['name'].lower(), file_type + ) and check_igv_file_path(obj['path'], omit_list): file_paths.append(obj['path']) except Exception: pass diff --git a/samplesheets/studyapps/germline/plugins.py b/samplesheets/studyapps/germline/plugins.py index 3e90f83a..9041b873 100644 --- a/samplesheets/studyapps/germline/plugins.py +++ b/samplesheets/studyapps/germline/plugins.py @@ -13,21 +13,20 @@ from samplesheets.models import Investigation, Study, GenericMaterial from samplesheets.plugins import SampleSheetStudyPluginPoint from samplesheets.rendering import SampleSheetTableBuilder +from samplesheets.studyapps.germline.utils import ( + get_pedigree_file_path, + get_families, + get_family_sources, +) from samplesheets.studyapps.utils import ( - get_igv_omit_override, + get_igv_omit_list, check_igv_file_suffix, - check_igv_file_name, + check_igv_file_path, get_igv_session_url, get_igv_irods_url, ) from samplesheets.utils import get_index_by_header -from samplesheets.studyapps.germline.utils import ( - get_pedigree_file_path, - get_families, - get_family_sources, -) - logger = logging.getLogger(__name__) table_builder = SampleSheetTableBuilder() @@ -36,7 +35,6 @@ # SODAR constants PROJECT_TYPE_PROJECT = SODAR_CONSTANTS['PROJECT_TYPE_PROJECT'] - # Local constants APP_NAME = 'samplesheets.studyapps.germline' @@ -323,8 +321,8 @@ def _update_study_cache(cls, study, user, cache_backend, irods_backend): logger.error('Error querying for study objects: {}'.format(ex)) project = study.get_project() - bam_override = get_igv_omit_override(project, 'bam') - vcf_override = get_igv_omit_override(project, 'vcf') + bam_omit_list = get_igv_omit_list(project, 'bam') + vcf_omit_list = get_igv_omit_list(project, 'vcf') for assay in study.assays.all(): assay_plugin = assay.get_plugin() @@ -353,7 +351,7 @@ def _update_study_cache(cls, study, user, cache_backend, irods_backend): for o in study_objs if o['path'].startswith(path + '/') and check_igv_file_suffix(o['name'], 'bam') - and check_igv_file_name(o['name'], 'bam', 
bam_override) + and check_igv_file_path(o['path'], bam_omit_list) ] row_fam = row[fam_idx]['value'] # Add VCF objects @@ -369,7 +367,7 @@ def _update_study_cache(cls, study, user, cache_backend, irods_backend): for o in study_objs if o['path'].startswith(path + '/') and o['name'].lower().endswith('vcf.gz') - and check_igv_file_name(o['name'], 'vcf', vcf_override) + and check_igv_file_path(o['path'], vcf_omit_list) ] # Update data diff --git a/samplesheets/studyapps/germline/tests/test_plugins_taskflow.py b/samplesheets/studyapps/germline/tests/test_plugins_taskflow.py index 7fc66a7a..01776764 100644 --- a/samplesheets/studyapps/germline/tests/test_plugins_taskflow.py +++ b/samplesheets/studyapps/germline/tests/test_plugins_taskflow.py @@ -3,7 +3,6 @@ import os from django.conf import settings -from django.test import override_settings from django.urls import reverse # Projectroles dependency @@ -290,7 +289,7 @@ def test_get_shortcut_links_files(self): + get_pedigree_file_path('vcf', self.source, study_tables), ) - def test_get_shortcut_links_files_cram(self): + def test_get_shortcut_links_cram(self): """Test get_shortcut_links() with CRAM file in iRODS""" self.irods.collections.create(self.source_path) cram_path = os.path.join( @@ -430,7 +429,7 @@ def test_get_shortcut_links_invalid(self): self.assertEqual(len(sl['data']['bam']['files']), 0) self.assertEqual(len(sl['data']['vcf']['files']), 0) - def test_get_shortcut_links_files_omit(self): + def test_get_shortcut_links_omit(self): """Test get_shortcut_links() with omittable files in iRODS""" self.irods.collections.create(self.source_path) bam_path = os.path.join( @@ -477,7 +476,7 @@ def test_get_shortcut_links_files_omit(self): + get_pedigree_file_path('vcf', self.source, study_tables), ) - def test_get_shortcut_links_files_omit_only(self): + def test_get_shortcut_links_omit_only(self): """Test get_shortcut_links() with only omittable files in iRODS""" self.irods.collections.create(self.source_path) bam_path_omit = os.path.join( @@ -498,9 +497,38 @@ def test_get_shortcut_links_files_omit_only(self): self.assertEqual(len(sl['data']['bam']['files']), 0) self.assertEqual(len(sl['data']['vcf']['files']), 0) - @override_settings(SHEETS_IGV_OMIT_BAM=['omit.cram']) - def test_get_shortcut_links_files_omit_cram(self): + def test_get_shortcut_links_omit_only_empty_overrides(self): + """Test get_shortcut_links() with empty project overrides""" + app_settings.set( + 'samplesheets', 'igv_omit_bam', '', project=self.project + ) + app_settings.set( + 'samplesheets', 'igv_omit_vcf', '', project=self.project + ) + self.irods.collections.create(self.source_path) + bam_path_omit = os.path.join( + self.source_path, '{}_dragen_evidence.bam'.format(SAMPLE_ID) + ) + vcf_path_omit = os.path.join( + self.source_path, '{}_cnv.vcf.gz'.format(FAMILY_ID) + ) + self.irods.data_objects.create(bam_path_omit) + self.irods.data_objects.create(vcf_path_omit) + + self.plugin.update_cache(self.cache_name, self.project) + study_tables = self.tb.build_study_tables(self.study) + sl = self.plugin.get_shortcut_links( + self.study, study_tables, family=[FAMILY_ID] + ) + self.assertEqual(len(sl['data']['session']['files']), 1) + self.assertEqual(len(sl['data']['bam']['files']), 1) + self.assertEqual(len(sl['data']['vcf']['files']), 1) + + def test_get_shortcut_links_omit_cram(self): """Test get_shortcut_links() with omittable CRAM file in iRODS""" + app_settings.set( + 'samplesheets', 'igv_omit_bam', '*omit.cram', project=self.project + ) 
         self.irods.collections.create(self.source_path)
         cram_path_omit = os.path.join(
             self.source_path, '{}_omit.cram'.format(SAMPLE_ID)
@@ -520,18 +548,18 @@ def test_get_shortcut_links_files_omit_cram(self):
         self.assertEqual(len(sl['data']['bam']['files']), 0)
         self.assertEqual(len(sl['data']['vcf']['files']), 0)
 
-    def test_get_shortcut_links_files_omit_override(self):
+    def test_get_shortcut_links_omit_override(self):
         """Test get_shortcut_links() with project-specific omit override"""
         app_settings.set(
             'samplesheets',
             'igv_omit_bam',
-            'test.bam, xxx.bam',
+            '*test.bam,*xxx.bam',
             project=self.project,
         )
         app_settings.set(
             'samplesheets',
             'igv_omit_vcf',
-            'test.vcf.gz, yyy.vcf.gz',
+            '*test.vcf.gz,*yyy.vcf.gz',
             project=self.project,
         )
         self.irods.collections.create(self.source_path)
@@ -584,7 +612,7 @@ def test_update_cache_files(self):
         self.assertEqual(ci['vcf'][FAMILY_ID], vcf_path)
         self.assertIsNone(ci['vcf'][FAMILY_ID2])
 
-    def test_update_cache_files_cram(self):
+    def test_update_cache_cram(self):
         """Test update_cache() with CRAM file in iRODS"""
         self.irods.collections.create(self.source_path)
         cram_path = os.path.join(
@@ -603,7 +631,7 @@ def test_update_cache_files_cram(self):
         self.assertEqual(ci['vcf'][FAMILY_ID], vcf_path)
         self.assertIsNone(ci['vcf'][FAMILY_ID2])
 
-    def test_update_cache_files_omit(self):
+    def test_update_cache_omit(self):
         """Test update_cache() with omittable files in iRODS"""
         self.irods.collections.create(self.source_path)
         # Create omittable files which come before real ones alphabetically
@@ -631,7 +659,7 @@ def test_update_cache_files_omit(self):
         self.assertEqual(ci['bam'][self.source.name], bam_path)
         self.assertEqual(ci['vcf'][FAMILY_ID], vcf_path)
 
-    def test_update_cache_files_omit_only(self):
+    def test_update_cache_omit_only(self):
         """Test update_cache() with only omittable files in iRODS"""
         self.irods.collections.create(self.source_path)
         bam_path_omit = os.path.join(
@@ -650,9 +678,36 @@ def test_update_cache_files_omit_only(self):
         self.assertIsNone(ci['bam'][self.source.name])
         self.assertIsNone(ci['vcf'][FAMILY_ID])
 
-    @override_settings(SHEETS_IGV_OMIT_BAM=['omit.cram'])
-    def test_update_cache_files_omit_only_cram(self):
+    def test_update_cache_omit_only_empty_overrides(self):
+        """Test update_cache() with empty project overrides"""
+        app_settings.set(
+            'samplesheets', 'igv_omit_bam', '', project=self.project
+        )
+        app_settings.set(
+            'samplesheets', 'igv_omit_vcf', '', project=self.project
+        )
+        self.irods.collections.create(self.source_path)
+        bam_path_omit = os.path.join(
+            self.source_path, '{}_dragen_evidence.bam'.format(SAMPLE_ID)
+        )
+        vcf_path_omit = os.path.join(
+            self.source_path, '{}_cnv.vcf.gz'.format(FAMILY_ID)
+        )
+        self.irods.data_objects.create(bam_path_omit)
+        self.irods.data_objects.create(vcf_path_omit)
+        self.plugin.update_cache(self.cache_name, self.project)
+        ci = self.cache_backend.get_cache_item(
+            APP_NAME, self.cache_name, self.project
+        ).data
+        # With empty omit settings nothing is omitted, files should be returned
+        self.assertEqual(ci['bam'][self.source.name], bam_path_omit)
+        self.assertEqual(ci['vcf'][FAMILY_ID], vcf_path_omit)
+
+    def test_update_cache_omit_only_cram(self):
         """Test update_cache() with omittable CRAM file in iRODS"""
+        app_settings.set(
+            'samplesheets', 'igv_omit_bam', '*omit.cram', project=self.project
+        )
         self.irods.collections.create(self.source_path)
         cram_path_omit = os.path.join(
             self.source_path, '{}_omit.cram'.format(SAMPLE_ID)
@@ -669,18 +724,18 @@ def test_update_cache_files_omit_only_cram(self):
self.assertIsNone(ci['bam'][self.source.name]) self.assertIsNone(ci['vcf'][FAMILY_ID]) - def test_update_cache_files_omit_override(self): + def test_update_cache_omit_override(self): """Test update_cache() with project-specific omit override""" app_settings.set( 'samplesheets', 'igv_omit_bam', - 'test.bam, xxx.bam', + '*test.bam,*xxx.bam', project=self.project, ) app_settings.set( 'samplesheets', 'igv_omit_vcf', - 'test.vcf.gz, yyy.vcf.gz', + '*test.vcf.gz,*yyy.vcf.gz', project=self.project, ) self.irods.collections.create(self.source_path) diff --git a/samplesheets/studyapps/germline/utils.py b/samplesheets/studyapps/germline/utils.py index 1c526d3f..69d4ced6 100644 --- a/samplesheets/studyapps/germline/utils.py +++ b/samplesheets/studyapps/germline/utils.py @@ -7,8 +7,8 @@ from samplesheets.models import GenericMaterial from samplesheets.studyapps.utils import ( - get_igv_omit_override, - check_igv_file_name, + get_igv_omit_list, + check_igv_file_path, check_igv_file_suffix, ) from samplesheets.utils import get_index_by_header @@ -32,7 +32,7 @@ def get_pedigree_file_path(file_type, source, study_tables): raise Exception('iRODS Backend not available') query_paths = [] - override = get_igv_omit_override(source.study.get_project(), file_type) + omit_list = get_igv_omit_list(source.study.get_project(), file_type) def _get_val_by_index(row, idx): return row[idx]['value'] if idx else None @@ -85,7 +85,7 @@ def _get_val_by_index(row, idx): if ( obj['path'].startswith(query_path + '/') and check_igv_file_suffix(obj['name'], file_type) - and check_igv_file_name(obj['name'], file_type, override) + and check_igv_file_path(obj['path'], omit_list) ): file_paths.append(obj['path']) logger.debug('Added path: {}'.format(obj['path'])) diff --git a/samplesheets/studyapps/tests/test_utils.py b/samplesheets/studyapps/tests/test_utils.py index 3eacfaaf..57f1a772 100644 --- a/samplesheets/studyapps/tests/test_utils.py +++ b/samplesheets/studyapps/tests/test_utils.py @@ -14,13 +14,22 @@ from projectroles.tests.test_models import ProjectMixin, RoleAssignmentMixin from samplesheets.plugins import IGV_DEFAULT_GENOME -from samplesheets.studyapps.utils import check_igv_file_suffix, get_igv_xml +from samplesheets.studyapps.utils import ( + get_igv_omit_list, + check_igv_file_suffix, + check_igv_file_path, + get_igv_xml, +) from samplesheets.tests.test_io import SampleSheetIOMixin app_settings = AppSettingAPI() +# Local constants +INVALID_FILE_TYPE = 'INVALID_TYPE' + + class TestStudyAppUtilsBase( ProjectMixin, RoleAssignmentMixin, SampleSheetIOMixin, TestCase ): @@ -41,7 +50,78 @@ def setUp(self): ) -class TestCheckIGVFileSuffix(TestCase): +class TestGetIGVOmitList(TestStudyAppUtilsBase): + """Tests for get_igv_omit_list()""" + + def test_get(self): + """Test get_igv_omit_list()""" + self.assertEqual( + get_igv_omit_list(self.project, 'bam'), ['*dragen_evidence.bam'] + ) + self.assertEqual( + get_igv_omit_list(self.project, 'vcf'), + ['*cnv.vcf.gz', '*ploidy.vcf.gz', '*sv.vcf.gz'], + ) + + def test_get_invalid_file_type(self): + """Test get_igv_omit_list() with invalid file type""" + with self.assertRaises(ValueError): + get_igv_omit_list(self.project, INVALID_FILE_TYPE) + + def test_get_no_preceeding_asterisk(self): + """Test get_igv_omit_list() without preceeding asterisk""" + # NOTE: We can't use override_settings here + app_settings.set( + 'samplesheets', + 'igv_omit_bam', + 'dragen_evidence.bam', + project=self.project, + ) + app_settings.set( + 'samplesheets', 'igv_omit_vcf', 'cnv.vcf.gz', project=self.project 
+        )
+        self.assertEqual(
+            get_igv_omit_list(self.project, 'bam'), ['*dragen_evidence.bam']
+        )
+        self.assertEqual(
+            get_igv_omit_list(self.project, 'vcf'), ['*cnv.vcf.gz']
+        )
+
+    def test_get_mixed_preceding_asterisks(self):
+        """Test get_igv_omit_list() with mixed preceding asterisks"""
+        app_settings.set(
+            'samplesheets',
+            'igv_omit_bam',
+            '*xxx.bam,yyy.bam',
+            project=self.project,
+        )
+        app_settings.set(
+            'samplesheets',
+            'igv_omit_vcf',
+            '*xxx.vcf.gz,yyy.vcf.gz',
+            project=self.project,
+        )
+        self.assertEqual(
+            get_igv_omit_list(self.project, 'bam'), ['*xxx.bam', '*yyy.bam']
+        )
+        self.assertEqual(
+            get_igv_omit_list(self.project, 'vcf'),
+            ['*xxx.vcf.gz', '*yyy.vcf.gz'],
+        )
+
+    def test_get_empty_setting(self):
+        """Test get_igv_omit_list() with empty setting values"""
+        app_settings.set(
+            'samplesheets', 'igv_omit_bam', '', project=self.project
+        )
+        app_settings.set(
+            'samplesheets', 'igv_omit_vcf', '', project=self.project
+        )
+        self.assertEqual(get_igv_omit_list(self.project, 'bam'), [])
+        self.assertEqual(get_igv_omit_list(self.project, 'vcf'), [])
+
+
+class TestCheckIGVFileSuffix(TestCase):  # TestStudyAppUtilsBase not needed
     """Tests for check_igv_file_suffix()"""
 
     def test_check_bam(self):
@@ -74,6 +154,58 @@ def test_check_invalid_type(self):
             check_igv_file_suffix('test.bam', 'INVALID')
 
 
+class TestCheckIGVFilePath(TestCase):
+    """Tests for check_igv_file_path()"""
+
+    def test_path(self):
+        """Test check_igv_file_path()"""
+        path = 'xxx/yyy.bam'
+        omit_list = ['*zzz.bam']
+        self.assertTrue(check_igv_file_path(path, omit_list))
+
+    def test_path_omit_file(self):
+        """Test check_igv_file_path() with file name in omit list"""
+        path = 'xxx/yyy.bam'
+        omit_list = ['*yyy.bam']
+        self.assertFalse(check_igv_file_path(path, omit_list))
+
+    def test_path_empty_list(self):
+        """Test check_igv_file_path() with empty omit list"""
+        path = 'xxx/yyy.bam'
+        omit_list = []
+        self.assertTrue(check_igv_file_path(path, omit_list))
+
+    def test_path_omit_multiple(self):
+        """Test check_igv_file_path() with multiple patterns"""
+        path = 'xxx/yyy.bam'
+        omit_list = ['*yyy.bam', '*zzz.bam']
+        self.assertFalse(check_igv_file_path(path, omit_list))
+
+    def test_path_omit_no_match(self):
+        """Test check_igv_file_path() with multiple non-matching patterns"""
+        path = 'xxx/yyy.bam'
+        omit_list = ['*aaa.bam', '*bbb.bam']
+        self.assertTrue(check_igv_file_path(path, omit_list))
+
+    def test_path_omit_case(self):
+        """Test check_igv_file_path() with file name in different case"""
+        path = 'xxx/YYY.BAM'
+        omit_list = ['*yyy.bam']
+        self.assertFalse(check_igv_file_path(path, omit_list))
+
+    def test_path_omit_collections(self):
+        """Test check_igv_file_path() with matching collections"""
+        path = '000/aaa/bbb/yyy.bam'
+        omit_list = ['*/aaa/bbb/*']
+        self.assertFalse(check_igv_file_path(path, omit_list))
+
+    def test_path_omit_collections_middle(self):
+        """Test check_igv_file_path() with partial collection match"""
+        path = '000/aaa/bbb/yyy.bam'
+        omit_list = ['*/aaa/*/yyy.bam']
+        self.assertFalse(check_igv_file_path(path, omit_list))
+
+
 class TestGetIGVXML(TestStudyAppUtilsBase):
     """Tests for get_igv_xml()"""
 
diff --git a/samplesheets/studyapps/utils.py b/samplesheets/studyapps/utils.py
index 335aa49a..cebf5ae2 100644
--- a/samplesheets/studyapps/utils.py
+++ b/samplesheets/studyapps/utils.py
@@ -3,6 +3,7 @@
 import hashlib
 
 from lxml import etree as ET
+from pathlib import PurePosixPath
 
 from django.conf import settings
 from django.urls import reverse
@@ -21,24 +22,27 @@
 INVALID_TYPE_MSG = 
'Invalid value for file_type' -def get_igv_omit_override(project, file_type): +def get_igv_omit_list(project, file_type): """ - Get IGV omit override setting for a project. NOTE: Added as a separate - method to avoid redundant database queries. + Get list of IGV omit glob patterns for a specific file type in a project. + NOTE: Added as a separate method to avoid redundant database queries. :param project: Project object :param file_type: String ("bam" or "vcf", "bam" is also used for CRAM) - :return: List or None + :return: List (appends * in front of each path if missing) """ ft = file_type.lower() if ft not in ['bam', 'vcf']: raise ValueError(INVALID_TYPE_MSG) - setting = app_settings.get( + setting_val = app_settings.get( 'samplesheets', 'igv_omit_{}'.format(ft), project=project ) - if setting: - return [s.strip() for s in setting.split(',')] - return None + if not setting_val: + return [] + return [ + '{}{}'.format('*' if not s.strip().startswith('*') else '', s.strip()) + for s in setting_val.split(',') + ] def check_igv_file_suffix(file_name, file_type): @@ -60,24 +64,18 @@ def check_igv_file_suffix(file_name, file_type): ) -def check_igv_file_name(file_name, file_type, override=None): +def check_igv_file_path(path, omit_list): """ - Check if file is acceptable for IGV session inclusion. Returns False if - suffix is found in env vars of omittable files. + Check if file path is acceptable for IGV session inclusion. Returns False if + pattern is found in IGV omit settings. - :param file_name: String - :param file_type: String ("bam" or "vcf", "bam" is also used for CRAM) - :param override: File suffixes to override site-wide setting (list or None) - :raise: ValueError if file_type is incorrect - :return: Boolean (True if name is OK) + :param path: Full or partial iRODS path (string) + :param omit_list: List of path glob patterns to omit (list) + :return: Boolean (True if path is OK) """ - if file_type.lower() not in ['bam', 'vcf']: - raise ValueError(INVALID_TYPE_MSG) - fn = file_name.lower() - if override: - return not any([s.lower() for s in override if fn.endswith(s)]) - ol = getattr(settings, 'SHEETS_IGV_OMIT_' + file_type.upper(), []) - return not any([s.lower() for s in ol if fn.endswith(s)]) + return not any( + [p for p in omit_list if PurePosixPath(path.lower()).match(p.lower())] + ) def get_igv_session_url(source, app_name, merge=False): From 288da3ec71b6e5dbd4163abaaf162c45e8da7bbd Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Thu, 7 Mar 2024 10:34:53 +0100 Subject: [PATCH 28/33] fix germline study cache update with no family column (#1921) --- CHANGELOG.rst | 1 + samplesheets/studyapps/germline/plugins.py | 7 +++-- .../germline/tests/test_plugins_taskflow.py | 26 +++++++++++++++++++ 3 files changed, 30 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 5cf243a5..50069b87 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -73,6 +73,7 @@ Fixed - Sheet template creation failure with slash characters in title/ID fields (#1896) - ``get_pedigree_file_path()`` used in cancer study app tests (#1914) - IGV omit settings not correctly set on project creation (#1925) + - Germline study cache build crash with no family column (#1921) - **Taskflowbackend** - Hardcoded iRODS path length in ``landing_zone_move`` (#1888) - Uncaught exceptions in ``SetAccessTask`` (#1906) diff --git a/samplesheets/studyapps/germline/plugins.py b/samplesheets/studyapps/germline/plugins.py index 9041b873..995805c1 100644 --- a/samplesheets/studyapps/germline/plugins.py +++ 
b/samplesheets/studyapps/germline/plugins.py @@ -353,11 +353,10 @@ def _update_study_cache(cls, study, user, cache_backend, irods_backend): and check_igv_file_suffix(o['name'], 'bam') and check_igv_file_path(o['path'], bam_omit_list) ] - row_fam = row[fam_idx]['value'] # Add VCF objects - if row_fam: - vcf_query_id = row_fam - else: + if fam_idx and row[fam_idx].get('value'): + vcf_query_id = row[fam_idx]['value'] + else: # If family column isn't present/filled, use source name vcf_query_id = source_name if vcf_query_id not in vcf_paths: vcf_paths[vcf_query_id] = [] diff --git a/samplesheets/studyapps/germline/tests/test_plugins_taskflow.py b/samplesheets/studyapps/germline/tests/test_plugins_taskflow.py index 01776764..ee960a0b 100644 --- a/samplesheets/studyapps/germline/tests/test_plugins_taskflow.py +++ b/samplesheets/studyapps/germline/tests/test_plugins_taskflow.py @@ -13,6 +13,7 @@ # Taskflowbackend dependency from taskflowbackend.tests.base import TaskflowViewTestBase +from samplesheets.models import GenericMaterial from samplesheets.rendering import SampleSheetTableBuilder from samplesheets.tests.test_io import SampleSheetIOMixin, SHEET_DIR from samplesheets.tests.test_models import SampleSheetModelMixin @@ -763,3 +764,28 @@ def test_update_cache_omit_override(self): # Omitted files should not be returned self.assertEqual(ci['bam'][self.source.name], bam_path_omit) self.assertEqual(ci['vcf'][FAMILY_ID], vcf_path_omit) + + def test_update_cache_no_family(self): + """Test update_cache() with no family characteristic in source""" + # Clear the family characteristics and header + for m in GenericMaterial.objects.filter( + study=self.study, item_type='SOURCE' + ): + m.characteristics.pop('Family') + m.headers.remove('Characteristics[Family]') + m.save() + self.irods.collections.create(self.source_path) + bam_path = os.path.join( + self.source_path, '{}_test.bam'.format(SAMPLE_ID) + ) + vcf_path = os.path.join( + self.source_path, '{}_test.vcf.gz'.format(SAMPLE_ID) + ) + self.irods.data_objects.create(bam_path) + self.irods.data_objects.create(vcf_path) + self.plugin.update_cache(self.cache_name, self.project) + ci = self.cache_backend.get_cache_item( + APP_NAME, self.cache_name, self.project + ).data + self.assertEqual(ci['bam'][self.source.name], bam_path) + self.assertEqual(ci['vcf'][self.source.name], vcf_path) From 5b9c8179b1a414020346131122790dea2c18c2bb Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Thu, 7 Mar 2024 11:25:54 +0100 Subject: [PATCH 29/33] upgrade to cubi-isa-templates==0.1.2 (#1854) --- CHANGELOG.rst | 1 + requirements/base.txt | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 50069b87..06b1c676 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -35,6 +35,7 @@ Changed - Upgrade to Django v3.2.24 (#1854) - Upgrade to django-sodar-core v0.13.4 (#1899) - Upgrade critical Vue app dependencies (#1854) + - Upgrade to cubi-isa-templates v0.1.2 (#1854) - Update installation documentation (#1871) - **Irodsbackend** - Reduce redundant object queries (#1883) diff --git a/requirements/base.txt b/requirements/base.txt index 97b2b90e..ffa20142 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -102,8 +102,8 @@ pronto==2.5.0 # For OWL conversion # CUBI ISA-Tab templates cookiecutter==2.2.3 -# cubi-isa-templates==0.1.1 --e git+https://github.com/bihealth/cubi-isa-templates.git@d3ebe866f3823d81fdb358042ef7ed4d8b8fd100#egg=cubi-isa-templates +cubi-isa-templates==0.1.2 +# -e 
git+https://github.com/bihealth/cubi-isa-templates.git@d3ebe866f3823d81fdb358042ef7ed4d8b8fd100#egg=cubi-isa-templates # Taskflow requirements tooz==3.0.0 From c7d29950e53c7dda6914ac8e3abf6ff5aa0ef670 Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Wed, 13 Mar 2024 16:13:42 +0100 Subject: [PATCH 30/33] fix named process pooling and renaming issues (#1904) --- CHANGELOG.rst | 1 + ...> i_small_assay_insert_pool_material.json} | 0 .../i_small_assay_insert_pool_process.json | 124 ++++++++++ samplesheets/tests/test_views_ajax.py | 230 ++++++++++-------- samplesheets/views_ajax.py | 14 +- samplesheets/vueapp/src/App.vue | 68 ++++-- .../src/components/editors/DataCellEditor.vue | 37 +-- .../components/renderers/RowEditRenderer.vue | 12 +- .../vueapp/tests/unit/DataCellEditor.spec.js | 48 ++++ 9 files changed, 374 insertions(+), 160 deletions(-) rename samplesheets/tests/edit/{i_small_assay_insert_pool.json => i_small_assay_insert_pool_material.json} (100%) create mode 100644 samplesheets/tests/edit/i_small_assay_insert_pool_process.json diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 06b1c676..3c7c7733 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -48,6 +48,7 @@ Changed - Refactor ``IrodsDataRequestModifyMixin`` timeline helpers (#1913) - Rename ``get_igv_omit_override()`` to ``get_igv_omit_list()`` (#1924) - Rename ``check_igv_file_name()`` to ``check_igv_file_path()`` (#1923) + - Named process pooling and renaming in sheet editor (#1904) - **Taskflowbackend** - Optimize ``landing_zone_move`` iRODS path retrieval (#1882) - Set zone status on uncaught errors in ``run_flow()`` (#1458) diff --git a/samplesheets/tests/edit/i_small_assay_insert_pool.json b/samplesheets/tests/edit/i_small_assay_insert_pool_material.json similarity index 100% rename from samplesheets/tests/edit/i_small_assay_insert_pool.json rename to samplesheets/tests/edit/i_small_assay_insert_pool_material.json diff --git a/samplesheets/tests/edit/i_small_assay_insert_pool_process.json b/samplesheets/tests/edit/i_small_assay_insert_pool_process.json new file mode 100644 index 00000000..6bb7de85 --- /dev/null +++ b/samplesheets/tests/edit/i_small_assay_insert_pool_process.json @@ -0,0 +1,124 @@ +{ + "study": "11111111-1111-1111-1111-111111111111", + "assay": "22222222-2222-2222-2222-222222222222", + "nodes": [ + { + "cells": [ + { + "uuid": "11111111-1111-1111-5555-000000000000", + "obj_cls": "GenericMaterial", + "value": "0818-N1" + } + ] + }, + { + "cells": [ + { + "uuid": null, + "value": "library preparation", + "uuid_ref": "22222222-2222-2222-bbbb-000000000000", + "header_name": "Protocol", + "header_type": "protocol", + "header_field": "col10", + "obj_cls": "Process" + } + ], + "headers": [ + "Protocol REF" + ] + }, + { + "cells": [ + { + "uuid": null, + "value": "0818-N1-DNA1", + "unit": "", + "header_name": "Name", + "header_type": "name", + "header_field": "col11", + "obj_cls": "GenericMaterial", + "item_type": "MATERIAL" + } + ], + "headers": [ + "Library Name" + ] + }, + { + "cells": [ + { + "uuid": "22222222-2222-2222-2222-000000000003", + "value": "nucleic acid sequencing", + "obj_cls": "Process" + } + ] + }, + { + "cells": [ + { + "uuid": null, + "value": "0818-N1-DNA1-WES1-R1.fastq.qz", + "unit": "", + "header_name": "Name", + "header_type": "name", + "header_field": "col14", + "obj_cls": "GenericMaterial", + "item_type": "DATA" + } + ], + "headers": [ + "Raw Data File" + ] + }, + { + "cells": [ + { + "uuid": null, + "value": "0818-N1-DNA1-WES1-R2.fastq.qz", + "unit": "", + "header_name": "Name", + 
"header_type": "name", + "header_field": "col15", + "obj_cls": "GenericMaterial", + "item_type": "DATA" + } + ], + "headers": [ + "Raw Data File" + ] + }, + { + "cells": [ + { + "uuid": null, + "value": "somatic variant calling-2", + "unit": "", + "header_name": "Data Transformation Name", + "header_type": "process_name", + "header_field": "col16", + "obj_cls": "Process" + } + ], + "headers": [ + "Data Transformation Name" + ] + }, + { + "cells": [ + { + "uuid": null, + "value": "new-somatic.vcf.gz", + "unit": "", + "header_name": "Name", + "header_type": "name", + "header_field": "col17", + "obj_cls": "GenericMaterial", + "item_type": "DATA" + } + ], + "headers": [ + "Derived Data File" + ] + } + ] +} \ No newline at end of file diff --git a/samplesheets/tests/test_views_ajax.py b/samplesheets/tests/test_views_ajax.py index 156c6c49..2a7a8ba0 100644 --- a/samplesheets/tests/test_views_ajax.py +++ b/samplesheets/tests/test_views_ajax.py @@ -76,7 +76,12 @@ STUDY_INSERT_PATH = EDIT_DIR + 'i_small_study_insert.json' ASSAY_INSERT_PATH = EDIT_DIR + 'i_small_assay_insert.json' ASSAY_INSERT_SPLIT_PATH = EDIT_DIR + 'i_small_assay_insert_split.json' -ASSAY_INSERT_POOL_PATH = EDIT_DIR + 'i_small_assay_insert_pool.json' +ASSAY_INSERT_POOL_MATERIAL_PATH = os.path.join( + EDIT_DIR, 'i_small_assay_insert_pool_material.json' +) +ASSAY_INSERT_POOL_PROCESS_PATH = os.path.join( + EDIT_DIR, 'i_small_assay_insert_pool_process.json' +) STUDY_DELETE_PATH = EDIT_DIR + 'i_small_study_delete.json' ASSAY_DELETE_PATH = EDIT_DIR + 'i_small_assay_delete.json' EDIT_SOURCE_NAME = '0818' @@ -184,7 +189,7 @@ def setUp(self): self.assay = self.study.assays.first() def test_get(self): - """Test GET for context retrieval with sample sheets""" + """Test SheetContextAjaxView GET""" with self.login(self.user): response = self.client.get( reverse( @@ -289,8 +294,8 @@ def test_get(self): } self.assertEqual(response_data, expected) - def test_no_sheets(self): - """Test context retrieval without sample sheets""" + def test_get_no_sheets(self): + """Test GET without sample sheets""" self.investigation.active = False self.investigation.save() @@ -345,7 +350,7 @@ def test_no_sheets(self): # TODO: Test anonymous request and irods_webdav_enabled - def test_delegate_min_owner(self): + def test_get_delegate_min_owner(self): """Test GET as delegate with owner minimum role""" app_settings.set( APP_NAME, @@ -363,7 +368,7 @@ def test_delegate_min_owner(self): self.assertEqual(response.status_code, 200) self.assertFalse(json.loads(response.json())['perms']['edit_config']) - def test_delegate_min_delegate(self): + def test_get_delegate_min_delegate(self): """Test GET as delegate with delegate minimum role""" app_settings.set( APP_NAME, @@ -381,7 +386,7 @@ def test_delegate_min_delegate(self): self.assertEqual(response.status_code, 200) self.assertTrue(json.loads(response.json())['perms']['edit_config']) - def test_irods_request_alert_owner(self): + def test_get_irods_request_alert_owner(self): """Test GET with active iRODS request alert as owner""" self.investigation.irods_status = True self.investigation.save() @@ -413,7 +418,7 @@ def test_irods_request_alert_owner(self): ), ) - def test_irods_request_alert_contributor(self): + def test_get_irods_request_alert_contributor(self): """Test GET with active iRODS request alert as contributor""" self.investigation.irods_status = True self.investigation.save() @@ -440,7 +445,7 @@ def test_irods_request_alert_contributor(self): response_data = json.loads(response.data) 
self.assertEqual(len(response_data['alerts']), 0) - def test_inherited_owner(self): + def test_get_inherited_owner(self): """Test GET as inherited owner""" # Set up category owner user_cat = self.make_user('user_cat') @@ -477,7 +482,7 @@ def setUp(self): self.cache_args = [APP_NAME, self.cache_name, self.project] def test_get(self): - """Test study tables retrieval""" + """Test StudyTablesAjaxView GET""" with self.login(self.user): response = self.client.get( reverse( @@ -513,7 +518,7 @@ def test_get(self): self.assertNotIn('edit_context', ret_data) def test_get_edit(self): - """Test study tables retrieval with edit mode enabled""" + """Test GET with edit mode enabled""" with self.login(self.user): response = self.client.get( reverse( @@ -549,7 +554,7 @@ def test_get_edit(self): self.assertIsNotNone(ret_data['edit_context']['protocols']) def test_get_study_cache(self): - """Test cached study table creation on retrieval""" + """Test GET for cached study table creation on retrieval""" self.assertIsNone(self.cache_backend.get_cache_item(*self.cache_args)) self.assertEqual(JSONCacheItem.objects.count(), 0) with self.login(self.user): @@ -566,7 +571,7 @@ def test_get_study_cache(self): self.assertEqual(JSONCacheItem.objects.count(), 1) def test_get_study_cache_existing(self): - """Test retrieval with existing cached study tables""" + """Test GET with existing cached study tables""" study_tables = table_builder.build_study_tables(self.study) cache_name = STUDY_TABLE_CACHE_ITEM.format(study=self.study.sodar_uuid) self.cache_backend.set_cache_item( @@ -602,7 +607,7 @@ def setUp(self): self.assay = self.study.assays.first() def test_get(self): - """Test study links retrieval without plugin""" + """Test StudyLinksAjaxView GET without plugin""" with self.login(self.user): response = self.client.get( reverse( @@ -623,7 +628,7 @@ def setUp(self): self.investigation = self.import_isa_from_file(SHEET_PATH, self.project) def test_get(self): - """Test study tables retrieval""" + """Test SheetWarningsAjaxView GET""" with self.login(self.user): response = self.client.get( reverse( @@ -640,19 +645,6 @@ def test_get(self): class TestSheetCellEditAjaxView(SamplesheetsViewTestBase): """Tests for SheetCellEditAjaxView""" - def setUp(self): - super().setUp() - self.investigation = self.import_isa_from_file(SHEET_PATH, self.project) - self.study = self.investigation.studies.first() - # Set up POST data - self.values = {'updated_cells': []} - # Set up helpers - self.cache_backend = get_backend_api('sodar_cache') - self.cache_name = STUDY_TABLE_CACHE_ITEM.format( - study=self.study.sodar_uuid - ) - self.cache_args = [APP_NAME, self.cache_name, self.project] - @classmethod def _convert_ontology_value(cls, value): """ @@ -668,8 +660,21 @@ def _convert_ontology_value(cls, value): value['unit'] = None return value - def test_edit_name(self): - """Test editing the name of a material""" + def setUp(self): + super().setUp() + self.investigation = self.import_isa_from_file(SHEET_PATH, self.project) + self.study = self.investigation.studies.first() + # Set up POST data + self.values = {'updated_cells': []} + # Set up helpers + self.cache_backend = get_backend_api('sodar_cache') + self.cache_name = STUDY_TABLE_CACHE_ITEM.format( + study=self.study.sodar_uuid + ) + self.cache_args = [APP_NAME, self.cache_name, self.project] + + def test_post_material_name(self): + """Test SheetCellEditAjaxView POST with material name""" obj = GenericMaterial.objects.get(study=self.study, name='0816') new_name = '0816aaa' 
self.values['updated_cells'].append( @@ -694,8 +699,8 @@ def test_edit_name(self): obj.refresh_from_db() self.assertEqual(obj.name, new_name) - def test_edit_name_empty(self): - """Test setting an empty material name (should fail)""" + def test_post_material_name_empty(self): + """Test POST with empty material name (should fail)""" obj = GenericMaterial.objects.get(study=self.study, name='0816') self.values['updated_cells'].append( { @@ -719,8 +724,8 @@ def test_edit_name_empty(self): obj.refresh_from_db() self.assertEqual(obj.name, '0816') - def test_edit_performer(self): - """Test editing the performer of a process""" + def test_post_performer(self): + """Test POST with process performer""" obj = Process.objects.filter(study=self.study, assay=None).first() value = 'Alice Example ' self.values['updated_cells'].append( @@ -745,8 +750,8 @@ def test_edit_performer(self): obj.refresh_from_db() self.assertEqual(obj.performer, value) - def test_edit_perform_date(self): - """Test editing the perform date of a process""" + def test_post_perform_date(self): + """Test POST with process perform date""" obj = Process.objects.filter(study=self.study, assay=None).first() value = '2020-07-07' self.values['updated_cells'].append( @@ -771,8 +776,8 @@ def test_edit_perform_date(self): obj.refresh_from_db() self.assertEqual(obj.perform_date.strftime('%Y-%m-%d'), value) - def test_edit_perform_date_empty(self): - """Test editing the perform date of a process with an empty date""" + def test_post_perform_date_empty(self): + """Test POST with empty process perform date""" obj = Process.objects.filter(study=self.study, assay=None).first() value = '' self.values['updated_cells'].append( @@ -797,8 +802,8 @@ def test_edit_perform_date_empty(self): obj.refresh_from_db() self.assertIsNone(obj.perform_date) - def test_edit_perform_date_invalid(self): - """Test editing the perform date of a process with an invalid date""" + def test_post_perform_date_invalid(self): + """Test POST with invalid perform date (should fail)""" obj = Process.objects.filter(study=self.study, assay=None).first() og_date = obj.perform_date value = '2020-11-31' @@ -824,8 +829,8 @@ def test_edit_perform_date_invalid(self): obj.refresh_from_db() self.assertEqual(obj.perform_date, og_date) - def test_edit_characteristics_str(self): - """Test editing a characteristics string value in a material""" + def test_post_characteristics_str(self): + """Test POST with material characteristics string value""" obj = GenericMaterial.objects.get(study=self.study, name='0816') header_name = 'organism' self.assertNotEqual( @@ -858,8 +863,8 @@ def test_edit_characteristics_str(self): {'unit': None, 'value': EDIT_NEW_VALUE_STR}, ) - def test_edit_param_values_str(self): - """Test editing a parameter values string value in a process""" + def test_post_param_values_str(self): + """Test POST with process parameter values string value""" obj = Process.objects.filter(study=self.study, assay=None).first() header_name = 'instrument' self.assertNotEqual( @@ -892,8 +897,8 @@ def test_edit_param_values_str(self): {'unit': None, 'value': EDIT_NEW_VALUE_STR}, ) - def test_edit_protocol(self): - """Test editing the protocol reference of a process""" + def test_post_protocol(self): + """Test POST with process protocol reference""" obj = Process.objects.filter( study=self.study, unique_name__icontains='sample collection' ).first() @@ -926,8 +931,8 @@ def test_edit_protocol(self): obj.refresh_from_db() self.assertEqual(obj.protocol, new_protocol) - def 
test_edit_process_name(self): - """Test editing the name of a process""" + def test_post_process_name(self): + """Test POST with process name""" obj = Process.objects.filter( study=self.study, unique_name__icontains='sample collection' ).first() @@ -960,8 +965,8 @@ def test_edit_process_name(self): self.assertEqual(obj.name, name) self.assertEqual(obj.name_type, name_type) - def test_edit_ontology_term(self): - """Test editing an ontology term with a single term""" + def test_post_ontology_term(self): + """Test POST with single ontology term""" obj = GenericMaterial.objects.get(study=self.study, name='0817') name = 'organism' self.assertEqual(obj.characteristics[name], EMPTY_ONTOLOGY_VAL) @@ -1000,8 +1005,8 @@ def test_edit_ontology_term(self): obj.characteristics[name], self._convert_ontology_value(value) ) - def test_edit_ontology_term_replace(self): - """Test replacing an existing ontology term""" + def test_post_ontology_term_replace(self): + """Test POST to replace existing ontology term""" obj = GenericMaterial.objects.get(study=self.study, name='0815') name = 'organism' og_value = { @@ -1047,8 +1052,8 @@ def test_edit_ontology_term_replace(self): obj.characteristics[name], self._convert_ontology_value(value) ) - def test_edit_ontology_term_list(self): - """Test editing an ontology term with a list of terms""" + def test_post_ontology_term_list(self): + """Test POST with list of ontology terms""" obj = GenericMaterial.objects.get(study=self.study, name='0817') name = 'organism' value = [ @@ -1093,8 +1098,8 @@ def test_edit_ontology_term_list(self): obj.characteristics[name], self._convert_ontology_value(value) ) - def test_edit_ontology_term_empty(self): - """Test editing an ontology term with an empty value""" + def test_post_ontology_term_empty(self): + """Test POST with empty ontology term value""" obj = GenericMaterial.objects.get(study=self.study, name='0815') name = 'organism' og_value = { @@ -1128,8 +1133,8 @@ def test_edit_ontology_term_empty(self): obj.refresh_from_db() self.assertEqual(obj.characteristics[name], EMPTY_ONTOLOGY_VAL) - def test_edit_ontology_term_ref(self): - """Test ontology source ref updating when editing term value""" + def test_post_ontology_term_ref(self): + """Test POST for ontology source ref updating when editing term value""" # Set up ontology obo_doc = fastobo.load(OBO_PATH) ontology = OBOFormatOntologyIO().import_obo( @@ -1203,8 +1208,8 @@ def test_edit_ontology_term_ref(self): } self.assertEqual(ref, expected) - def test_edit_study_cache(self): - """Test editing with cached study tables""" + def test_post_study_cache(self): + """Test POST with cached study tables""" # Create cache item study_tables = table_builder.build_study_tables(self.study) cache_name = STUDY_TABLE_CACHE_ITEM.format(study=self.study.sodar_uuid) @@ -1258,8 +1263,8 @@ def setUp(self): self.study = self.investigation.studies.first() self.values = {'updated_cells': []} - def test_edit_extract_label_string(self): - """Test updating the extract label field with a string value""" + def test_post_extract_label_string(self): + """Test SheetCellEditAjaxView POST with extract label and string value""" label = 'New label' name_type = th.LABELED_EXTRACT_NAME obj = GenericMaterial.objects.get( @@ -1316,8 +1321,8 @@ def setUp(self): ) self.cache_args = [APP_NAME, self.cache_name, self.project] - def test_insert_study_row(self): - """Test inserting a new row into a study""" + def test_post_study_row(self): + """Test SheetRowInsertAjaxView POST with study row""" self.assertIsNone( 
GenericMaterial.objects.filter( study=self.study, name='0818' @@ -1378,8 +1383,8 @@ def test_insert_study_row(self): self.assertEqual(sample.characteristics['status']['value'], '0') self.assertEqual(sample.factor_values['treatment']['value'], 'yes') - def test_insert_assay_row(self): - """Test inserting a new row into an assay""" + def test_post_assay_row(self): + """Test POST with assay row""" # Insert study row response = self.insert_row(path=STUDY_INSERT_PATH) self.assertEqual(response.status_code, 200) @@ -1430,8 +1435,8 @@ def test_insert_assay_row(self): for i in range(len(node_names) - 1): self.assertIn([node_names[i], node_names[i + 1]], self.assay.arcs) - def test_insert_assay_row_split(self): - """Test inserting a row with splitting into an assay""" + def test_post_assay_row_split(self): + """Test POST with assay row and splitting""" # Insert study and assay rows self.insert_row(path=STUDY_INSERT_PATH) sample = GenericMaterial.objects.get( @@ -1458,8 +1463,8 @@ def test_insert_assay_row_split(self): ) self.assertEqual(len(self.assay.arcs), arc_count + 1) - def test_insert_assay_row_pool(self): - """Test inserting a row with pooling into an assay""" + def test_post_assay_row_pool_material(self): + """Test POST with assay row and pooling by data file material""" self.insert_row(path=STUDY_INSERT_PATH) sample = GenericMaterial.objects.get( study=self.study, name=EDIT_SAMPLE_NAME @@ -1472,7 +1477,7 @@ def test_insert_assay_row_pool(self): mat_count = GenericMaterial.objects.filter(assay=self.assay).count() proc_count = Process.objects.filter(assay=self.assay).count() arc_count = len(self.assay.arcs) - response = self.insert_row(path=ASSAY_INSERT_POOL_PATH) + response = self.insert_row(path=ASSAY_INSERT_POOL_MATERIAL_PATH) self.assertEqual(response.status_code, 200) self.assay.refresh_from_db() @@ -1485,10 +1490,37 @@ def test_insert_assay_row_pool(self): ) self.assertEqual(len(self.assay.arcs), arc_count + 7) + def test_post_assay_row_pool_process(self): + """Test POST with assay row and pooling by named process""" + self.insert_row(path=STUDY_INSERT_PATH) + sample = GenericMaterial.objects.get( + study=self.study, name=EDIT_SAMPLE_NAME + ) + sample.sodar_uuid = EDIT_SAMPLE_UUID + sample.save() + self.insert_row(path=ASSAY_INSERT_PATH) + + self.update_assay_row_uuids(update_sample=False) + mat_count = GenericMaterial.objects.filter(assay=self.assay).count() + proc_count = Process.objects.filter(assay=self.assay).count() + arc_count = len(self.assay.arcs) + response = self.insert_row(path=ASSAY_INSERT_POOL_PROCESS_PATH) + + self.assertEqual(response.status_code, 200) + self.assay.refresh_from_db() + self.assertEqual( + GenericMaterial.objects.filter(assay=self.assay).count(), + mat_count + 4, + ) + self.assertEqual( + Process.objects.filter(assay=self.assay).count(), proc_count + 1 + ) + self.assertEqual(len(self.assay.arcs), arc_count + 7) + # TODO: Test ontology ref updating - def test_insert_study_cache(self): - """Test inserting new row with cached study tables""" + def test_post_study_cache(self): + """Test POST with cached study tables""" study_tables = table_builder.build_study_tables(self.study) cache_name = STUDY_TABLE_CACHE_ITEM.format(study=self.study.sodar_uuid) self.cache_backend.set_cache_item( @@ -1552,8 +1584,8 @@ def setUp(self): ) self.cache_args = [APP_NAME, self.cache_name, self.project] - def test_delete_assay_row(self): - """Test row deletion from assay""" + def test_post_assay_row(self): + """Test SheetRowDeleteAjaxView POST with assay row""" response = 
self.delete_row(ASSAY_DELETE_PATH) self.assertEqual(response.status_code, 200) self.assay.refresh_from_db() @@ -1568,8 +1600,8 @@ def test_delete_assay_row(self): for n in self.row_names: self.assertNotIn(n, target_nodes) - def test_delete_study_row(self): - """Test row deletion from study""" + def test_post_study_row(self): + """Test POST with study row""" # First delete the assay row response = self.delete_row(ASSAY_DELETE_PATH) self.assertEqual(response.status_code, 200) @@ -1593,8 +1625,8 @@ def test_delete_study_row(self): ).first() ) - def test_delete_study_row_in_use(self): - """Test study row deletion with sample used in asssay (should fail)""" + def test_post_study_row_in_use(self): + """Test POST with study row containing sample used in asssay (should fail)""" response = self.delete_row(STUDY_DELETE_PATH) self.assertEqual(response.status_code, 500) self.assertIsNotNone( @@ -1615,8 +1647,8 @@ def test_delete_study_row_in_use(self): # TODO: Test deletion with splitting/pooling - def test_delete_assay_row_study_cache(self): - """Test row deletion from assay with cached study tables""" + def test_post_assay_row_study_cache(self): + """Test POST with assay row and cached study tables""" study_tables = table_builder.build_study_tables(self.study) cache_name = STUDY_TABLE_CACHE_ITEM.format(study=self.study.sodar_uuid) self.cache_backend.set_cache_item( @@ -1771,8 +1803,8 @@ def setUp(self): ) self.cache_args = [APP_NAME, self.cache_name, self.project] - def test_update_study_column(self): - """Test posting a study column update""" + def test_post_study_column(self): + """Test SheetEditConfigAjaxView POST with study column""" sheet_config = app_settings.get( APP_NAME, 'sheet_config', project=self.project ) @@ -1823,8 +1855,8 @@ def test_update_study_column(self): 1, ) - def test_superuser_min_owner(self): - """Test updating as superuser with minimum role of owner""" + def test_post_superuser_min_owner(self): + """Test POST as superuser with minimum role of owner""" edit_config_min_role = app_settings.get( APP_NAME, 'edit_config_min_role', project=self.project ) @@ -1842,8 +1874,8 @@ def test_superuser_min_owner(self): ) self.assertEqual(response.status_code, 200) - def test_owner_min_owner(self): - """Test updating as owner with minimum=owner""" + def test_post_owner_min_owner(self): + """Test POST as owner with minimum=owner""" edit_config_min_role = app_settings.get( APP_NAME, 'edit_config_min_role', project=self.project ) @@ -1861,8 +1893,8 @@ def test_owner_min_owner(self): ) self.assertEqual(response.status_code, 200) - def test_delegate_min_owner(self): - """Test updating as delegate with minimum=owner (should fail)""" + def test_post_delegate_min_owner(self): + """Test POST as delegate with minimum=owner (should fail)""" edit_config_min_role = app_settings.get( APP_NAME, 'edit_config_min_role', project=self.project ) @@ -1884,8 +1916,8 @@ def test_delegate_min_owner(self): 'User not allowed to modify column config', ) - def test_contributor_min_owner(self): - """Test updating as contributor with minimum=owner (should fail)""" + def test_post_contributor_min_owner(self): + """Test POST as contributor with minimum=owner (should fail)""" edit_config_min_role = app_settings.get( APP_NAME, 'edit_config_min_role', project=self.project ) @@ -1907,8 +1939,8 @@ def test_contributor_min_owner(self): 'User not allowed to modify column config', ) - def test_inherited_owner(self): - """Test updating as inherited owner""" + def test_post_inherited_owner(self): + """Test POST as inherited 
owner""" with self.login(self.user_cat): response = self.client.post( reverse( @@ -1920,8 +1952,8 @@ def test_inherited_owner(self): ) self.assertEqual(response.status_code, 200) - def test_update_study_cache(self): - """Test posting a study column update with cached study tables""" + def test_post_study_cache(self): + """Test POST with study column and cached study tables""" study_tables = table_builder.build_study_tables(self.study) cache_name = STUDY_TABLE_CACHE_ITEM.format(study=self.study.sodar_uuid) self.cache_backend.set_cache_item( @@ -1983,7 +2015,7 @@ def setUp(self): self.study_config = self.display_config['studies'][self.s_uuid] def test_post(self): - """Test updating the sheet configuration""" + """Test StudyDisplayConfigAjaxView POST""" self.assertEqual( self.study_config['nodes'][0]['fields'][2]['visible'], True ) @@ -2013,7 +2045,7 @@ def test_post(self): ) def test_post_default(self): - """Test updating along with the default configuration""" + """Test POST with default configuration""" self.assertEqual( self.study_config['nodes'][0]['fields'][2]['visible'], True ) @@ -2048,7 +2080,7 @@ def test_post_default(self): self.assertEqual(updated_config, default_config) def test_post_no_change(self): - """Test posting with no updates""" + """Test POST with no updates""" with self.login(self.user): response = self.client.post( reverse( @@ -2099,7 +2131,7 @@ def setUp(self): self.isa2.save() def test_get(self): - """Test GET returning diff data""" + """Test SheetVersionCompareAjaxView GET returning diff data""" expected = { 'studies': { 's_small2.txt': [ diff --git a/samplesheets/views_ajax.py b/samplesheets/views_ajax.py index a2c79e40..17ce0ad3 100644 --- a/samplesheets/views_ajax.py +++ b/samplesheets/views_ajax.py @@ -999,10 +999,11 @@ def _get_name(cls, node): """ if node['cells'][0]['obj_cls'] == 'Process': for cell in node['cells']: - if cell['header_type'] == 'process_name': + if cell.get('header_type') == 'process_name': return cell['value'] else: # Material return node['cells'][0]['value'] + return None @classmethod def _add_node_attr(cls, node_obj, cell): @@ -1191,8 +1192,15 @@ def _insert_row(self, row): ) ) + # TODO: We create duplicate rows with named processes, fix! 
+ # Check if we'll need to consider collapsing of unnamed nodes - if len([n for n in row['nodes'] if not self._get_name(n)]) > 0: + # (References to existing nodes with UUID will not have to be collapsed) + if [ + n + for n in row['nodes'] + if not n['cells'][0].get('uuid') and not self._get_name(n) + ]: logger.debug('Unnamed node(s) in row, will attempt collapsing') collapse = True try: @@ -1390,6 +1398,8 @@ def post(self, request, *args, **kwargs): except Exception as ex: return Response({'detail': str(ex)}, status=500) except Exception as ex: + if settings.DEBUG: + raise (ex) return Response({'detail': str(ex)}, status=500) return Response(self.ok_data, status=200) diff --git a/samplesheets/vueapp/src/App.vue b/samplesheets/vueapp/src/App.vue index 039418c3..d649b61c 100644 --- a/samplesheets/vueapp/src/App.vue +++ b/samplesheets/vueapp/src/App.vue @@ -765,12 +765,12 @@ export default { let value = this.getDefaultValue(nextColId, gridOptions, true) // If default name suffix is set, fill name, enable node and continue + // TODO: Remove repetition with named process (see below) if (headerType === 'name' && !(['SOURCE', 'DATA'].includes(itemType)) && value.value) { value.value = this.getNamePrefix(rowNode, cols, startIdx) + value.value let createNew = true - // Check if name already appears in column, update UUID if found gridOptions.api.forEachNode(function (r) { if (r.data[nextColId].value === value.value) { @@ -778,7 +778,6 @@ export default { value.uuid = r.data[nextColId].uuid } }) - value.newInit = false value.editable = true rowNode.setDataValue(nextColId, value) @@ -831,12 +830,30 @@ export default { } else forceEmpty = true } else if (headerType === 'process_name') { // Fill process name with default suffix if set + // TODO: Remove repetition with material name const namePrefix = this.getNamePrefix(rowNode, cols, i) if (namePrefix && value.value) { value.value = namePrefix + value.value + let createNew = true + // Check if name already appears in column, update UUID if found + gridOptions.api.forEachNode(function (r) { + if (r.data[nextColId].value === value.value) { + createNew = false + value.uuid = r.data[nextColId].uuid + } + }) newInit = false value.newInit = false processActive = true + rowNode.setDataValue(nextColId, value) + this.handleNodeUpdate( + value, + cols[i], + rowNode, + gridOptions, + gridUuid, + createNew) + return } else value.value = '' // HACK: Reset default value if not filled value.editable = true // Process name should always be editable } else { @@ -859,14 +876,13 @@ export default { }, handleNodeUpdate ( - firstCellValue, column, rowNode, gridOptions, gridUuid, createNew + nameCellValue, column, rowNode, gridOptions, gridUuid, createNew ) { // console.log('handleNodeUpdate() called; colId=' + column.colId) // DEBUG const gridApi = gridOptions.api const columnApi = gridOptions.columnApi - const firstColId = column.colId // ID of the identifying node column + const nameColId = column.colId // ID of the identifying node column let assayMode = false - if (gridUuid in this.sodarContext.studies[this.currentStudyUuid].assays) { assayMode = true } @@ -878,23 +894,22 @@ export default { const studyCols = studyOptions.columnApi.getColumns() let studyCopyRow = null const sampleColId = this.sampleColId - // Get sample row from study table studyApi.forEachNode(function (rowNode) { if (!studyCopyRow && - rowNode.data[sampleColId].uuid === firstCellValue.uuid_ref) { + rowNode.data[sampleColId].uuid === nameCellValue.uuid_ref) { studyCopyRow = rowNode } }) - - // Fill 
in preceeding nodes + // Fill in preceeding nodes for sample for (let i = 1; i < this.sampleIdx; i++) { // TODO: Create a generic helper for copying const copyColId = studyCols[i].colId const copyData = Object.assign(studyCopyRow.data[copyColId]) copyData.newRow = true copyData.newInit = false - copyData.editable = columnApi.getColumn(copyColId).colDef.cellRendererParams.fieldEditable + copyData.editable = columnApi.getColumn( + copyColId).colDef.cellRendererParams.fieldEditable rowNode.setDataValue(copyColId, copyData) } } @@ -908,55 +923,64 @@ export default { let groupId = parent.groupId if (groupId === '1' && this.sourceColSpan > 1) groupId = '2' let startIdx - for (let i = 1; i < cols.length - 1; i++) { - if (cols[i].colId === firstColId) { + if (cols[i].colId === nameColId) { startIdx = i + 1 break } } - for (let i = startIdx; i < cols.length - 1; i++) { const col = cols[i] // NOTE: Must use originalParent to work with hidden columns if (col.originalParent.groupId === groupId) { nodeCols.push(col) - } else if (col.colId !== firstColId && + } else if (col.colId !== nameColId && col.originalParent.groupId !== groupId) { nextNodeStartIdx = i break } } - // IF node is new THEN fill out other cells with default/empty values + // If the node is new, fill out other cells with default/empty values if (createNew) { for (let i = 0; i < nodeCols.length; i++) { const newColId = nodeCols[i].colId const value = this.getDefaultValue(nodeCols[i].colId, gridOptions) rowNode.setDataValue(newColId, value) } - } else { // ELSE set UUIDs and update cell values (only in the same table) + } else { // Else set UUIDs and update cell values (only in the same table) let copyRowNode = null gridApi.forEachNode(function (rowNode) { - if (!copyRowNode && rowNode.data[firstColId].value === firstCellValue.value) { + if (!copyRowNode && + rowNode.data[nameColId].value === nameCellValue.value) { copyRowNode = rowNode } }) - for (let i = 0; i < nodeCols.length; i++) { const copyColId = nodeCols[i].colId const copyData = Object.assign(copyRowNode.data[copyColId]) copyData.newInit = false - copyData.editable = columnApi.getColumn(copyColId).colDef.cellRendererParams.fieldEditable + copyData.editable = columnApi.getColumn( + copyColId).colDef.cellRendererParams.fieldEditable rowNode.setDataValue(copyColId, copyData) } } + // When updating protocol name column, update preceeding protocol UUID + if (startIdx > 2) { + const headerType = column.colDef.cellEditorParams.headerInfo.header_type + const prevCol = cols[startIdx - 2] + const prevHeaderType = prevCol.colDef.cellEditorParams.headerInfo.header_type + if (headerType === 'process_name' && + prevHeaderType === 'protocol' && + cols[startIdx - 2].originalParent.groupId === groupId) { + rowNode.data[prevCol.colId].uuid = rowNode.data[nameColId].uuid + } + } // Enable the next node(s), if we are initializing node for the 1st time if (nextNodeStartIdx) { this.enableNextNodes(rowNode, gridOptions, gridUuid, nextNodeStartIdx) } - // Redraw row node for all changes to be displayed gridApi.redrawRows({ rows: [rowNode] }) }, @@ -1031,7 +1055,6 @@ export default { if (this.sourceColSpan > 1) lastSourceGroupIdx = 2 // 2nd source group const lastSourceGroupId = cols[lastSourceGroupIdx].originalParent.groupId let groupId = cols[1].originalParent.groupId - // Modify starting index and group id for assay table updating if (assayMode) { startIdx = this.sampleIdx @@ -1046,13 +1069,11 @@ export default { value.newInit = false value.editable = cols[i].colDef.cellRendererParams.fieldEditable 
rowNode.setDataValue(cols[i].colId, value) - // Save sample info if new sample was added in study if (!assayMode && cols[i].colId === this.sampleColId) { sampleUuid = data.node_uuids[nodeIdx] sampleName = rowNode.data[cols[i].colId].value } - if (i < cols.length - 2 && groupId !== cols[i + 1].originalParent.groupId && cols[i + 1].originalParent.groupId !== lastSourceGroupId) { @@ -1090,7 +1111,6 @@ export default { console.log('Row insert status: ' + data.detail) // DEBUG this.showNotification('Insert Failed', 'danger', 1000) } - finishCallback() this.updatingRow = false } diff --git a/samplesheets/vueapp/src/components/editors/DataCellEditor.vue b/samplesheets/vueapp/src/components/editors/DataCellEditor.vue index 63afc5d0..500323eb 100644 --- a/samplesheets/vueapp/src/components/editors/DataCellEditor.vue +++ b/samplesheets/vueapp/src/components/editors/DataCellEditor.vue @@ -88,6 +88,7 @@ export default Vue.extend({ nameValues: [], nameUuids: {}, sampleColId: null, + nameColumn: false, destroyCalled: false // HACK for issue #869 } }, @@ -138,7 +139,7 @@ export default Vue.extend({ return '' }, getValidState () { - if (this.headerInfo.header_type === 'name') { // Name is a special case + if (this.nameColumn) { // Name is a special case // TODO: Cleanup/simplify // NOTE: Empty value is allowed for DATA materials if ((this.editValue.length === 0 && @@ -233,7 +234,6 @@ export default Vue.extend({ created () { this.app = this.params.app this.gridOptions = this.app.getGridOptionsByUuid(this.params.gridUuid) - // Cancel editing if editingCell is true if (this.app.editingCell) { this.editAllowed = false @@ -248,18 +248,18 @@ export default Vue.extend({ this.isValueArray = true this.editValue = this.editValue.join('; ') } - if (this.value.value) { this.ogEditValue = Object.assign(this.value.value) } else { this.ogEditValue = null } - this.headerInfo = this.params.headerInfo this.renderInfo = this.params.renderInfo this.editConfig = this.params.editConfig this.sampleColId = this.params.sampleColId - + if (['name', 'process_name'].includes(this.headerInfo.header_type)) { + this.nameColumn = true + } // console.log('Edit colId/field: ' + this.params.colDef.field) // DEBUG // Set up unit value @@ -322,7 +322,7 @@ export default Vue.extend({ } // Special setup for the name column - if (this.headerInfo.header_type === 'name') { + if (this.nameColumn) { if (this.value.newRow) this.containerTitle = 'Enter name of new or existing node' else this.containerTitle = 'Rename node' // If name, get other current values for comparison in validation @@ -360,20 +360,6 @@ export default Vue.extend({ this.value.value[i] = this.value.value[i].trim() } } - // Check if we're in a named process without a protocol - let namedProcess = false - if (this.headerInfo.header_type === 'process_name') { - namedProcess = true - const groupId = this.params.column.originalParent.groupId - const cols = this.gridOptions.columnApi.getColumns() - for (let i = 1; i < cols.length - 1; i++) { - if (cols[i].originalParent.groupId === groupId && - cols[i].colDef.cellEditorParams.headerInfo.header_type === 'protocol') { - namedProcess = false - break - } - } - } // Reject invalid value if (!this.valid) { @@ -385,7 +371,7 @@ export default Vue.extend({ } // Confirm renaming node into an existing node and overwriting values - if ((this.headerInfo.header_type === 'name' || namedProcess) && + if ((this.nameColumn) && this.value.newRow && this.ogEditValue && this.nameValues.includes(this.editValue)) { @@ -402,12 +388,11 @@ export default 
Vue.extend({ } // Proceed with setting values and saving - if ((this.headerInfo.header_type === 'name' || namedProcess) && - (!this.value.uuid || this.value.newRow)) { + if (this.nameColumn && (!this.value.uuid || this.value.newRow)) { // Set UUID if we are referring to an existing node (only if material) - if (!namedProcess && this.nameValues.includes(this.value.value)) { + if (this.nameValues.includes(this.value.value)) { this.value.uuid = this.nameUuids[this.value.value] - } else if (!namedProcess) { + } else { // Clear UUID in case user switched from existing to new while editing this.value.uuid = null } @@ -419,6 +404,7 @@ export default Vue.extend({ else this.value.unit = this.editUnit // Handle updating/initiating node + // TODO: Ensure this works also if setting process name value to empty this.app.handleNodeUpdate( this.value, this.params.column, @@ -434,7 +420,6 @@ export default Vue.extend({ // Update cell (only if we already have the UUID!) if (this.value.uuid) { this.app.handleCellEdit(this.getUpdateData(), true) - // If a sample has been renamed, update sample list for assay if (this.headerInfo.header_type === 'name' && this.params.colDef.field === this.sampleColId) { diff --git a/samplesheets/vueapp/src/components/renderers/RowEditRenderer.vue b/samplesheets/vueapp/src/components/renderers/RowEditRenderer.vue index 62062a91..2afe0908 100644 --- a/samplesheets/vueapp/src/components/renderers/RowEditRenderer.vue +++ b/samplesheets/vueapp/src/components/renderers/RowEditRenderer.vue @@ -313,23 +313,17 @@ export default Vue.extend({ let groupId = cols[startIdx].originalParent.groupId let fieldIdx = 0 // Field index within node let nodeCells = [] - let nodeClass + // let nodeClass let nodeStartIdx = startIdx for (let i = startIdx; i < cols.length - 1; i++) { - if (fieldIdx === 0) { - nodeStartIdx = i - nodeClass = cols[i].colDef.cellEditorParams.headerInfo.obj_cls - } - + if (fieldIdx === 0) nodeStartIdx = i // Add cells for new nodes, only the first node for existing ones - if (nodeClass === 'Process' || - fieldIdx === 0 || + if (fieldIdx === 0 || (!this.rowNode.data[cols[i].colId].uuid && !(this.assayMode && inSampleNode))) { nodeCells.push(this.getCellData(cols[i])) } - if (i === cols.length - 1 || groupId !== cols[i + 1].originalParent.groupId) { groupId = cols[i + 1].originalParent.groupId diff --git a/samplesheets/vueapp/tests/unit/DataCellEditor.spec.js b/samplesheets/vueapp/tests/unit/DataCellEditor.spec.js index 3a7902bd..f658190b 100644 --- a/samplesheets/vueapp/tests/unit/DataCellEditor.spec.js +++ b/samplesheets/vueapp/tests/unit/DataCellEditor.spec.js @@ -1,6 +1,7 @@ import { createLocalVue, mount } from '@vue/test-utils' import { studyUuid, + assayUuid, copy, getAppStub, getSheetTableComponents, @@ -819,6 +820,53 @@ describe('DataCellEditor.vue', () => { expect(wrapper.find('.sodar-ss-data').text()).toBe('-') }) + it('updates value in process assay name column', async () => { + const value = '0815-N1-DNA1-WES1-UPDATED' + const table = copy(studyTablesOneCol).tables.assays[assayUuid] + table.field_header[0] = studyTablesEdit.tables.assays[assayUuid].field_header[13] + table.table_data[0][0] = copy(studyTablesEdit).tables.assays[assayUuid].table_data[0][13] + const wrapper = mountSheetTable({ + table: table, + editStudyConfig: { + nodes: [{ fields: [studyEditConfig.assays[assayUuid].nodes[2].fields[1]] }] + } + }) + await waitAG(wrapper) + await waitRAF() + + await wrapper.find('.sodar-ss-data-cell').trigger('dblclick') + const input = 
wrapper.find('.ag-cell-edit-input') + input.element.value = value + await input.trigger('input') + await gridOptions.api.stopEditing() + expect(wrapper.find('.ag-cell-edit-input').exists()).toBe(false) + expect(wrapper.find('.sodar-ss-data').text()).toBe(value) + }) + + it('updates process assay name column with empty value', async () => { + const value = '' // NOTE: For named processes this IS allowed + const table = copy(studyTablesOneCol).tables.assays[assayUuid] + table.field_header[0] = studyTablesEdit.tables.assays[assayUuid].field_header[13] + table.table_data[0][0] = copy(studyTablesEdit).tables.assays[assayUuid].table_data[0][13] + const wrapper = mountSheetTable({ + table: table, + editStudyConfig: { + nodes: [{ fields: [studyEditConfig.assays[assayUuid].nodes[2].fields[1]] }] + } + }) + await waitAG(wrapper) + await waitRAF() + + await wrapper.find('.sodar-ss-data-cell').trigger('dblclick') + const input = wrapper.find('.ag-cell-edit-input') + input.element.value = value + await input.trigger('input') + await gridOptions.api.stopEditing() + expect(wrapper.find('.ag-cell-edit-input').exists()).toBe(false) + expect(wrapper.find('.sodar-ss-data').text()).toBe('-') // Empty value + }) + + // TODO: Test renaming to assay name already existing in table for new/old row // TODO: Test update propagation for multiple rows // TODO: Test value updating for new rows }) From b0c0775fcc798a0d93ece6cd5fa4d6d808d27395 Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Thu, 14 Mar 2024 13:43:09 +0100 Subject: [PATCH 31/33] fix assay table source name editing after row insert (#1928) --- CHANGELOG.rst | 1 + docs_manual/source/sodar_release_notes.rst | 1 + samplesheets/vueapp/src/App.vue | 10 ++++++++-- 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 3c7c7733..e919a646 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -76,6 +76,7 @@ Fixed - ``get_pedigree_file_path()`` used in cancer study app tests (#1914) - IGV omit settings not correctly set on project creation (#1925) - Germline study cache build crash with no family column (#1921) + - Source name editing failing in assay table after row insert (#1928) - **Taskflowbackend** - Hardcoded iRODS path length in ``landing_zone_move`` (#1888) - Uncaught exceptions in ``SetAccessTask`` (#1906) diff --git a/docs_manual/source/sodar_release_notes.rst b/docs_manual/source/sodar_release_notes.rst index f5ccc1d7..a9f1b039 100644 --- a/docs_manual/source/sodar_release_notes.rst +++ b/docs_manual/source/sodar_release_notes.rst @@ -24,6 +24,7 @@ Release for minor updates, maintenance and bug fixes. 
- Fix iRODS stats badge stuck in "updating" - Fix landing zone status updating not working in project details card - Fix landing zone creation crash with large amount of created collections +- Fix multiple sheet editor issues - Minor updates and bug fixes - Upgrade to SODAR Core v0.13.4 diff --git a/samplesheets/vueapp/src/App.vue b/samplesheets/vueapp/src/App.vue index d649b61c..50d271a7 100644 --- a/samplesheets/vueapp/src/App.vue +++ b/samplesheets/vueapp/src/App.vue @@ -1055,10 +1055,16 @@ export default { if (this.sourceColSpan > 1) lastSourceGroupIdx = 2 // 2nd source group const lastSourceGroupId = cols[lastSourceGroupIdx].originalParent.groupId let groupId = cols[1].originalParent.groupId - // Modify starting index and group id for assay table updating - if (assayMode) { + + if (assayMode) { // Assay table modifications + // Modify starting index and group id for assay table startIdx = this.sampleIdx groupId = cols[startIdx].originalParent.groupId + // Update newRow for source cells + for (let i = 1; i < startIdx; i++) { + const value = rowNode.data[cols[i].colId] + value.newRow = false + } } // Set cell data to match an existing row From 863b1b8edb6d0eabbcb98249ce91682fa853a5a1 Mon Sep 17 00:00:00 2001 From: Harald Wilhelmi Date: Thu, 14 Mar 2024 13:55:17 +0100 Subject: [PATCH 32/33] add django settings for reverse proxy setup (#1917) Co-authored-by: Harald Wilhelmi --- config/settings/base.py | 1 + config/settings/production.py | 3 +++ 2 files changed, 4 insertions(+) diff --git a/config/settings/base.py b/config/settings/base.py index 091a5791..459da0b6 100644 --- a/config/settings/base.py +++ b/config/settings/base.py @@ -35,6 +35,7 @@ # ------------------------------------------------------------------------------ # Hosts/domain names that are valid for this site ALLOWED_HOSTS = env.list('DJANGO_ALLOWED_HOSTS', default=['*']) +USE_X_FORWARDED_HOST = env.bool('DJANGO_USE_X_FORWARDED_HOST', False) # APP CONFIGURATION # ------------------------------------------------------------------------------ diff --git a/config/settings/production.py b/config/settings/production.py index 9d86f543..961a3268 100644 --- a/config/settings/production.py +++ b/config/settings/production.py @@ -43,6 +43,9 @@ ) CSRF_COOKIE_SECURE = True CSRF_COOKIE_HTTPONLY = True +CSRF_TRUSTED_ORIGINS = env.list('DJANGO_CSRF_TRUSTED_ORIGINS', default=[]) +CSRF_COOKIE_DOMAIN = env.str('DJANGO_CSRF_COOKIE_DOMAIN', default=None) + X_FRAME_OPTIONS = 'DENY' INSTALLED_APPS += ['gunicorn'] From b38e158896c83610a667cc7a2edd7c432bccc306 Mon Sep 17 00:00:00 2001 From: Mikko Nieminen Date: Fri, 15 Mar 2024 10:26:22 +0100 Subject: [PATCH 33/33] cleanup and prepare v0.14.2 release (#1861) --- CHANGELOG.rst | 8 +- config/settings/base.py | 7 +- config/settings/local.py | 4 +- config/settings/production.py | 8 +- docs_manual/source/api_documentation.rst | 2 +- docs_manual/source/api_examples.rst | 4 +- docs_manual/source/api_irodsinfo.rst | 2 +- docs_manual/source/api_landingzones.rst | 2 +- docs_manual/source/api_samplesheets.rst | 2 +- docs_manual/source/conf.py | 4 +- docs_manual/source/dev_guide.rst | 10 +- docs_manual/source/sodar_release_notes.rst | 4 +- landingzones/tests/test_ui.py | 70 ++++++++++++++ landingzones/views.py | 19 +--- landingzones/views_ajax.py | 4 - landingzones/views_api.py | 1 - requirements/base.txt | 2 +- .../samplesheets/samplesheet_import_form.html | 16 +--- samplesheets/views.py | 33 ++----- samplesheets/views_ajax.py | 4 +- samplesheets/views_api.py | 2 - samplesheets/vueapp/package-lock.json | 4 
+- samplesheets/vueapp/package.json | 2 +- samplesheets/vueapp/src/App.vue | 3 +- taskflowbackend/flows/landing_zone_delete.py | 2 +- taskflowbackend/flows/landing_zone_move.py | 2 +- taskflowbackend/tests/test_flows.py | 96 +++++++------------ 27 files changed, 163 insertions(+), 154 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index e919a646..cc256490 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,12 +5,14 @@ Changelog for the SODAR project. Loosely follows the `Keep a Changelog `_ guidelines. -Unreleased -========== +v0.14.2 (2024-03-15) +==================== Added ----- +- **General** + - Django settings for reverse proxy setup (#1917) - **Irodsbackend** - Sanitize and validate ``IRODS_ROOT_PATH`` in ``get_root_path()`` (#1891) - **Landingzones** @@ -32,7 +34,7 @@ Changed ------- - **General** - - Upgrade to Django v3.2.24 (#1854) + - Upgrade to Django v3.2.25 (#1854) - Upgrade to django-sodar-core v0.13.4 (#1899) - Upgrade critical Vue app dependencies (#1854) - Upgrade to cubi-isa-templates v0.1.2 (#1854) diff --git a/config/settings/base.py b/config/settings/base.py index 459da0b6..6fec8a55 100644 --- a/config/settings/base.py +++ b/config/settings/base.py @@ -22,7 +22,7 @@ env = environ.Env() # .env file, should load only in development environment -READ_DOT_ENV_FILE = env.bool('DJANGO_READ_DOT_ENV_FILE', default=False) +READ_DOT_ENV_FILE = env.bool('DJANGO_READ_DOT_ENV_FILE', False) if READ_DOT_ENV_FILE: # Operating System Environment variables have precedence over variables @@ -157,7 +157,7 @@ # See: https://docs.djangoproject.com/en/3.2/ref/settings/#databases # Uses django-environ to accept uri format # See: https://django-environ.readthedocs.io/en/latest/#supported-types -DATABASES = {'default': env.db('DATABASE_URL', default='postgres:///sodar')} +DATABASES = {'default': env.db('DATABASE_URL', 'postgres:///sodar')} DATABASES['default']['ATOMIC_REQUESTS'] = False # Set default auto field (for Django 3.2+) @@ -607,7 +607,7 @@ def set_logging(level=None): # General API settings -SODAR_API_DEFAULT_VERSION = '0.14.1' +SODAR_API_DEFAULT_VERSION = '0.14.2' SODAR_API_ALLOWED_VERSIONS = [ '0.7.0', '0.7.1', @@ -628,6 +628,7 @@ def set_logging(level=None): '0.13.4', '0.14.0', '0.14.1', + '0.14.2', ] SODAR_API_MEDIA_TYPE = 'application/vnd.bihealth.sodar+json' SODAR_API_DEFAULT_HOST = env.url( diff --git a/config/settings/local.py b/config/settings/local.py index 74ce43d2..7ffc301b 100644 --- a/config/settings/local.py +++ b/config/settings/local.py @@ -16,7 +16,7 @@ # DEBUG # ------------------------------------------------------------------------------ -DEBUG = env.bool('DJANGO_DEBUG', default=True) +DEBUG = env.bool('DJANGO_DEBUG', True) TEMPLATES[0]['OPTIONS']['debug'] = DEBUG # SECRET CONFIGURATION @@ -53,7 +53,7 @@ # django-debug-toolbar # ------------------------------------------------------------------------------ -ENABLE_DEBUG_TOOLBAR = env.bool('ENABLE_DEBUG_TOOLBAR', default=True) +ENABLE_DEBUG_TOOLBAR = env.bool('ENABLE_DEBUG_TOOLBAR', True) if ENABLE_DEBUG_TOOLBAR: MIDDLEWARE += ['debug_toolbar.middleware.DebugToolbarMiddleware'] diff --git a/config/settings/production.py b/config/settings/production.py index 961a3268..af45a4b9 100644 --- a/config/settings/production.py +++ b/config/settings/production.py @@ -28,15 +28,15 @@ SECURE_HSTS_SECONDS = 60 SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool( - 'DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS', default=True + 'DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS', True ) SECURE_CONTENT_TYPE_NOSNIFF = env.bool( - 
'DJANGO_SECURE_CONTENT_TYPE_NOSNIFF', default=True + 'DJANGO_SECURE_CONTENT_TYPE_NOSNIFF', True ) SECURE_BROWSER_XSS_FILTER = True SESSION_COOKIE_SECURE = True SESSION_COOKIE_HTTPONLY = True -SECURE_SSL_REDIRECT = env.bool('DJANGO_SECURE_SSL_REDIRECT', default=True) +SECURE_SSL_REDIRECT = env.bool('DJANGO_SECURE_SSL_REDIRECT', True) SECURE_REDIRECT_EXEMPT = env.list( 'DJANGO_SECURE_REDIRECT_EXEMPT', default=['/taskflow/', r'^irodsbackend/api/auth$'], @@ -44,7 +44,7 @@ CSRF_COOKIE_SECURE = True CSRF_COOKIE_HTTPONLY = True CSRF_TRUSTED_ORIGINS = env.list('DJANGO_CSRF_TRUSTED_ORIGINS', default=[]) -CSRF_COOKIE_DOMAIN = env.str('DJANGO_CSRF_COOKIE_DOMAIN', default=None) +CSRF_COOKIE_DOMAIN = env.str('DJANGO_CSRF_COOKIE_DOMAIN', None) X_FRAME_OPTIONS = 'DENY' diff --git a/docs_manual/source/api_documentation.rst b/docs_manual/source/api_documentation.rst index b3dbf395..0c421193 100644 --- a/docs_manual/source/api_documentation.rst +++ b/docs_manual/source/api_documentation.rst @@ -45,7 +45,7 @@ expected version. .. code-block:: console - Accept: application/vnd.bihealth.sodar+json; version=0.14.0 + Accept: application/vnd.bihealth.sodar+json; version=0.14.2 Specific sections of the SODAR API may require their own accept header. See the exact header requirement in the respective documentation on each section of the diff --git a/docs_manual/source/api_examples.rst b/docs_manual/source/api_examples.rst index 9b8ce3bf..205e0052 100644 --- a/docs_manual/source/api_examples.rst +++ b/docs_manual/source/api_examples.rst @@ -41,9 +41,9 @@ the SODAR API: # Token authorization header (required) auth_header = {'Authorization': 'token {}'.format(api_token)} # Use core_headers for project management API endpoints - core_headers = {**auth_header, 'Accept': 'application/vnd.bihealth.sodar-core+json; version=0.13.3'} + core_headers = {**auth_header, 'Accept': 'application/vnd.bihealth.sodar-core+json; version=0.13.4'} # Use sodar_headers for sample sheet and landing zone API endpoints - sodar_headers = {**auth_header, 'Accept': 'application/vnd.bihealth.sodar+json; version=0.14.1'} + sodar_headers = {**auth_header, 'Accept': 'application/vnd.bihealth.sodar+json; version=0.14.2'} .. note:: diff --git a/docs_manual/source/api_irodsinfo.rst b/docs_manual/source/api_irodsinfo.rst index a2e936fb..3234e4d3 100644 --- a/docs_manual/source/api_irodsinfo.rst +++ b/docs_manual/source/api_irodsinfo.rst @@ -22,4 +22,4 @@ SODAR version: .. code-block:: console - Accept: application/vnd.bihealth.sodar+json; version=0.14.1 + Accept: application/vnd.bihealth.sodar+json; version=0.14.2 diff --git a/docs_manual/source/api_landingzones.rst b/docs_manual/source/api_landingzones.rst index a28ba97a..024e27f4 100644 --- a/docs_manual/source/api_landingzones.rst +++ b/docs_manual/source/api_landingzones.rst @@ -32,4 +32,4 @@ SODAR version: .. code-block:: console - Accept: application/vnd.bihealth.sodar+json; version=0.14.1 + Accept: application/vnd.bihealth.sodar+json; version=0.14.2 diff --git a/docs_manual/source/api_samplesheets.rst b/docs_manual/source/api_samplesheets.rst index c91fca3b..401166c6 100644 --- a/docs_manual/source/api_samplesheets.rst +++ b/docs_manual/source/api_samplesheets.rst @@ -68,4 +68,4 @@ SODAR version: .. 
code-block:: console - Accept: application/vnd.bihealth.sodar+json; version=0.14.1 + Accept: application/vnd.bihealth.sodar+json; version=0.14.2 diff --git a/docs_manual/source/conf.py b/docs_manual/source/conf.py index 8360d084..a604f6d4 100644 --- a/docs_manual/source/conf.py +++ b/docs_manual/source/conf.py @@ -22,11 +22,11 @@ # -- Project information ----------------------------------------------------- project = 'SODAR' -copyright = '2018-2023, BIH Core Unit Bioinformatics' +copyright = '2018-2024, BIH Core Unit Bioinformatics' author = 'BIH Core Unit Bioinformatics' # The full version, including alpha/beta/rc tags -release = '0.14.2-WIP' +release = '0.14.2' # -- General configuration --------------------------------------------------- diff --git a/docs_manual/source/dev_guide.rst b/docs_manual/source/dev_guide.rst index 067c9526..c106065a 100644 --- a/docs_manual/source/dev_guide.rst +++ b/docs_manual/source/dev_guide.rst @@ -16,12 +16,14 @@ for development and is always the latest "bleeding edge" version of SODAR. The When naming your work branches, prefix them with the issue ID, preferably followed by a verb depicting the action: "add", "update", "fix", "remove", -"refactor", "upgrade", "deprecate" or something else if none of these ar -applicable. +"refactor", "upgrade", "deprecate" or something else if none of these are +applicable. If a relevant issue does not exist yet, please create one in the +repository's issue tracker. This will make changes and pull requests much easier +to track. The rest of the branch name should *concisely* represent the change. It is not necessary (and often not recommended) to include the entire name of the issue -as they may be verbose. +as they may be overly verbose. If a branch and pull request tackles multiple issues at once, including the ID of the most major issue is enough. @@ -49,7 +51,7 @@ Pull Requests Please add the related issue ID(s) to the title of your pull request and ensure the pull request is set against the ``dev`` branch. -It is strongly recommended to use descriptive commit messages even in work +It is strongly recommended to use descriptive commit messages even for work commits that are to be squashed in merging. This will aid the review process. Before submitting a pull request for review, ensure the following: diff --git a/docs_manual/source/sodar_release_notes.rst b/docs_manual/source/sodar_release_notes.rst index a9f1b039..d8b3d215 100644 --- a/docs_manual/source/sodar_release_notes.rst +++ b/docs_manual/source/sodar_release_notes.rst @@ -8,8 +8,8 @@ list of changes in current and previous releases, see the :ref:`full changelog`. -v0.14.2 (WIP) -============= +v0.14.2 (2024-03-15) +==================== Release for minor updates, maintenance and bug fixes. 
diff --git a/landingzones/tests/test_ui.py b/landingzones/tests/test_ui.py index 56c6a038..637cf1be 100644 --- a/landingzones/tests/test_ui.py +++ b/landingzones/tests/test_ui.py @@ -20,6 +20,7 @@ ZONE_STATUS_CREATING, ZONE_STATUS_ACTIVE, ZONE_STATUS_VALIDATING, + ZONE_STATUS_DELETED, ) from landingzones.tests.test_models import LandingZoneMixin @@ -325,6 +326,75 @@ def test_status_update(self): self._wait_for_status(zone_status, ZONE_STATUS_VALIDATING) self.assertEqual(zone_status.text, ZONE_STATUS_VALIDATING) + def test_stats_deleted_owner(self): + """Test ProjectZoneView stats badge on DELETED zone as owner""" + self._setup_investigation() + self.investigation.irods_status = True + self.investigation.save() + zone = self.make_landing_zone( + 'contrib_zone', + self.project, + self.user_contributor, + self.assay, + status=ZONE_STATUS_ACTIVE, + ) + self.login_and_redirect(self.user_contributor, self.url) + + zone_status = self.selenium.find_element( + By.CLASS_NAME, 'sodar-lz-zone-status' + ) + zone_status_info = self.selenium.find_element( + By.CLASS_NAME, 'sodar-lz-zone-status-info' + ) + self._wait_for_status(zone_status, ZONE_STATUS_ACTIVE) + self.assertTrue( + zone_status_info.find_element( + By.CLASS_NAME, 'sodar-irods-stats' + ).is_displayed() + ) + # Update status to deleted, stats badge should no longer be rendered + zone.set_status(ZONE_STATUS_DELETED) + self._wait_for_status(zone_status, ZONE_STATUS_DELETED) + self.assertFalse( + zone_status_info.find_element( + By.CLASS_NAME, 'sodar-irods-stats' + ).is_displayed() + ) + + def test_stats_deleted_superuser(self): + """Test ProjectZoneView stats badge on DELETED zone as superuser""" + self._setup_investigation() + self.investigation.irods_status = True + self.investigation.save() + zone = self.make_landing_zone( + 'contrib_zone', + self.project, + self.user_contributor, + self.assay, + status=ZONE_STATUS_ACTIVE, + ) + self.login_and_redirect(self.superuser, self.url) + + zone_status = self.selenium.find_element( + By.CLASS_NAME, 'sodar-lz-zone-status' + ) + zone_status_info = self.selenium.find_element( + By.CLASS_NAME, 'sodar-lz-zone-status-info' + ) + self._wait_for_status(zone_status, ZONE_STATUS_ACTIVE) + self.assertTrue( + zone_status_info.find_element( + By.CLASS_NAME, 'sodar-irods-stats' + ).is_displayed() + ) + zone.set_status(ZONE_STATUS_DELETED) + self._wait_for_status(zone_status, ZONE_STATUS_DELETED) + self.assertFalse( + zone_status_info.find_element( + By.CLASS_NAME, 'sodar-irods-stats' + ).is_displayed() + ) + def test_zone_buttons(self): """Test ProjectZoneView zone buttons""" self._setup_investigation() diff --git a/landingzones/views.py b/landingzones/views.py index 53356ed5..1308d69c 100644 --- a/landingzones/views.py +++ b/landingzones/views.py @@ -32,7 +32,6 @@ STATUS_ALLOW_UPDATE, STATUS_FINISHED, STATUS_INFO_DELETE_NO_COLL, - ZONE_STATUS_OK, ZONE_STATUS_DELETED, ) from landingzones.forms import LandingZoneForm @@ -174,9 +173,7 @@ def submit_create( ) tl_event.add_object(obj=zone, label='zone', name=zone.title) tl_event.add_object( - obj=zone.user, - label='user', - name=zone.user.username, + obj=zone.user, label='user', name=zone.user.username ) tl_event.add_object( obj=zone.assay, label='assay', name=zone.assay.get_name() @@ -268,15 +265,11 @@ def update_zone(self, zone, request=None): user=user, event_name='zone_update', description=description, - status_type=ZONE_STATUS_OK, + status_type='OK', extra_data=tl_extra, ) tl_event.add_object(obj=zone, label='zone', name=zone.title) - tl_event.add_object( - 
obj=user, - label='user', - name=user.username, - ) + tl_event.add_object(obj=user, label='user', name=user.username) tl_event.add_object( obj=zone.assay, label='assay', name=zone.assay.get_name() ) @@ -329,11 +322,7 @@ def submit_delete(self, zone): if zone_exists: # Submit with taskflow flow_name = 'landing_zone_delete' flow_data = self.get_flow_data( - zone, - flow_name, - { - 'zone_uuid': str(zone.sodar_uuid), - }, + zone, flow_name, {'zone_uuid': str(zone.sodar_uuid)} ) taskflow.submit( project=project, diff --git a/landingzones/views_ajax.py b/landingzones/views_ajax.py index 6e02698f..2979c115 100644 --- a/landingzones/views_ajax.py +++ b/landingzones/views_ajax.py @@ -24,21 +24,17 @@ def check_zone_permission(self, zone, user): def post(self, request, *args, **kwargs): zone_uuids = request.data.getlist('zone_uuids[]') project = self.get_project() - # Filter landing zones based on UUIDs and project zones = LandingZone.objects.filter( sodar_uuid__in=zone_uuids, project=project ) - status_dict = {} for zone in zones: # Check permissions if not self.check_zone_permission(zone, self.request.user): continue - status_dict[str(zone.sodar_uuid)] = { 'status': zone.status, 'status_info': zone.status_info, } - return Response(status_dict, status=200) diff --git a/landingzones/views_api.py b/landingzones/views_api.py index 57836422..3dda7365 100644 --- a/landingzones/views_api.py +++ b/landingzones/views_api.py @@ -280,7 +280,6 @@ def perform_update(self, serializer): if not self._validate_update_fields(serializer): # Should raise 400 Bad Request raise ValidationError('{}Invalid update fields'.format(ex_msg)) - # If all is OK, go forward with object update and taskflow submission super().perform_update(serializer) try: diff --git a/requirements/base.txt b/requirements/base.txt index ffa20142..a77640a9 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ wheel==0.40.0 setuptools==67.6.0 # Django -django==3.2.24 +django==3.2.25 # Configuration django-environ==0.10.0 diff --git a/samplesheets/templates/samplesheets/samplesheet_import_form.html b/samplesheets/templates/samplesheets/samplesheet_import_form.html index 2b2bec86..de71ee98 100644 --- a/samplesheets/templates/samplesheets/samplesheet_import_form.html +++ b/samplesheets/templates/samplesheets/samplesheet_import_form.html @@ -91,17 +91,11 @@

href="{% url 'samplesheets:project_sheets' project=project.sodar_uuid %}"> Cancel - {% if replace_sheets %} - - {% else %} - - {% endif %} + diff --git a/samplesheets/views.py b/samplesheets/views.py index d2ae8daf..27272413 100644 --- a/samplesheets/views.py +++ b/samplesheets/views.py @@ -686,15 +686,10 @@ def create_colls(self, investigation, request=None, sync=False): 'ticket_str': ticket_str, } taskflow.submit( - project=project, - flow_name='sheet_colls_create', - flow_data=flow_data, + project=project, flow_name='sheet_colls_create', flow_data=flow_data ) app_settings.set( - APP_NAME, - 'public_access_ticket', - ticket_str, - project=project, + APP_NAME, 'public_access_ticket', ticket_str, project=project ) if tl_event: tl_event.set_status('OK') @@ -1472,12 +1467,8 @@ def get_context_data(self, *args, **kwargs): def get_form_kwargs(self): """Pass kwargs to form""" kwargs = super().get_form_kwargs() - kwargs.update( - { - 'project': self.get_project(), - 'sheet_tpl': self._get_sheet_template(), - } - ) + kwargs['project'] = self.get_project() + kwargs['sheet_tpl'] = self._get_sheet_template() return kwargs def form_valid(self, form): @@ -1744,9 +1735,7 @@ def post(self, request, *args, **kwargs): tl_event.set_status('SUBMIT') try: taskflow.submit( - project=project, - flow_name='sheet_delete', - flow_data={}, + project=project, flow_name='sheet_delete', flow_data={} ) except taskflow.FlowSubmitException as ex: delete_success = False @@ -2184,17 +2173,14 @@ def post(self, request, **kwargs): status_type='OK', ) tl_event.add_object( - obj=sv, - label='isatab', - name=sv.get_full_name(), + obj=sv, label='isatab', name=sv.get_full_name() ) context['sheet_versions'].delete() messages.success( request, 'Deleted {} sample sheet version{}.'.format( - version_count, - 's' if version_count != 1 else '', + version_count, 's' if version_count != 1 else '' ), ) return redirect( @@ -2840,10 +2826,7 @@ def post(self, request, *args, **kwargs): try: batch = self.get_irods_request_objects() if not batch: - messages.error( - self.request, - NO_REQUEST_MSG, - ) + messages.error(self.request, NO_REQUEST_MSG) return redirect( reverse( 'samplesheets:irods_requests', diff --git a/samplesheets/views_ajax.py b/samplesheets/views_ajax.py index 17ce0ad3..cab0f0a7 100644 --- a/samplesheets/views_ajax.py +++ b/samplesheets/views_ajax.py @@ -544,9 +544,7 @@ def _get_display_config(self, investigation, user, sheet_config=None): 'using default..'.format(user.username) ) display_config = app_settings.get( - APP_NAME, - 'display_config_default', - project=project, + APP_NAME, 'display_config_default', project=project ) # If default display configuration is not found, build it diff --git a/samplesheets/views_api.py b/samplesheets/views_api.py index 5506515a..7b50f8ef 100644 --- a/samplesheets/views_api.py +++ b/samplesheets/views_api.py @@ -185,14 +185,12 @@ def get(self, request, *args, **kwargs): ).first() if not investigation: raise NotFound() - export_format = 'json' if self.request.get_full_path() == reverse( 'samplesheets:api_export_zip', kwargs={'project': project.sodar_uuid}, ): export_format = 'zip' - try: return self.get_isa_export(project, request, export_format) except Exception as ex: diff --git a/samplesheets/vueapp/package-lock.json b/samplesheets/vueapp/package-lock.json index ac9dc64f..2d26489c 100644 --- a/samplesheets/vueapp/package-lock.json +++ b/samplesheets/vueapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "samplesheets", - "version": "0.14.2-WIP", + "version": "0.14.2", "lockfileVersion": 2, 
"requires": true, "packages": { "": { "name": "samplesheets", - "version": "0.14.2-WIP", + "version": "0.14.2", "dependencies": { "bootstrap-vue": "^2.22.0", "core-js": "^3.23.5", diff --git a/samplesheets/vueapp/package.json b/samplesheets/vueapp/package.json index ec7403bc..c3f264f6 100644 --- a/samplesheets/vueapp/package.json +++ b/samplesheets/vueapp/package.json @@ -1,6 +1,6 @@ { "name": "samplesheets", - "version": "0.14.2-WIP", + "version": "0.14.2", "private": true, "scripts": { "serve": "vue-cli-service serve", diff --git a/samplesheets/vueapp/src/App.vue b/samplesheets/vueapp/src/App.vue index 50d271a7..497698d5 100644 --- a/samplesheets/vueapp/src/App.vue +++ b/samplesheets/vueapp/src/App.vue @@ -1062,8 +1062,7 @@ export default { groupId = cols[startIdx].originalParent.groupId // Update newRow for source cells for (let i = 1; i < startIdx; i++) { - const value = rowNode.data[cols[i].colId] - value.newRow = false + rowNode.data[cols[i].colId].newRow = false } } diff --git a/taskflowbackend/flows/landing_zone_delete.py b/taskflowbackend/flows/landing_zone_delete.py index 39fa04ed..373e9ea6 100644 --- a/taskflowbackend/flows/landing_zone_delete.py +++ b/taskflowbackend/flows/landing_zone_delete.py @@ -54,7 +54,7 @@ def build(self, force_fail=False): inject={'path': zone_path}, ) ) - # Set zone status to DELETING + # Set zone status to DELETED self.add_task( lz_tasks.SetLandingZoneStatusTask( name='Set landing zone status to DELETED', diff --git a/taskflowbackend/flows/landing_zone_move.py b/taskflowbackend/flows/landing_zone_move.py index 95aa5ee5..9e40d771 100644 --- a/taskflowbackend/flows/landing_zone_move.py +++ b/taskflowbackend/flows/landing_zone_move.py @@ -16,7 +16,7 @@ SAMPLE_COLL = settings.IRODS_SAMPLE_COLL -ZONE_INFO_CALC = 'Finding and calculating missing checksums in iRODS' +ZONE_INFO_CALC = 'Calculating missing checksums in iRODS' ZONE_INFO_VALIDATE = 'Validating {count} file{plural}' ZONE_INFO_READ_ONLY = ', write access disabled' diff --git a/taskflowbackend/tests/test_flows.py b/taskflowbackend/tests/test_flows.py index 130f3a84..d455ec73 100644 --- a/taskflowbackend/tests/test_flows.py +++ b/taskflowbackend/tests/test_flows.py @@ -670,8 +670,7 @@ def test_move(self): ) self.assertEqual(self.irods.data_objects.exists(sample_obj_path), False) self.assertEqual( - self.irods.data_objects.exists(sample_obj_path + '.md5'), - False, + self.irods.data_objects.exists(sample_obj_path + '.md5'), False ) flow_data = {'zone_uuid': str(self.zone.sodar_uuid)} @@ -694,18 +693,13 @@ def test_move(self): ) self.assertEqual(self.irods.data_objects.exists(sample_obj_path), True) self.assertEqual( - self.irods.data_objects.exists(sample_obj_path + '.md5'), - True, + self.irods.data_objects.exists(sample_obj_path + '.md5'), True ) self.assert_irods_access( - self.group_name, - sample_obj_path, - self.irods_access_read, + self.group_name, sample_obj_path, self.irods_access_read ) self.assert_irods_access( - self.group_name, - sample_obj_path + '.md5', - self.irods_access_read, + self.group_name, sample_obj_path + '.md5', self.irods_access_read ) def test_move_no_checksum(self): @@ -728,8 +722,7 @@ def test_move_no_checksum(self): ) self.assertEqual(self.irods.data_objects.exists(sample_obj_path), False) self.assertEqual( - self.irods.data_objects.exists(sample_obj_path + '.md5'), - False, + self.irods.data_objects.exists(sample_obj_path + '.md5'), False ) self.assertIsNone(obj.replicas[0].checksum) @@ -748,8 +741,7 @@ def test_move_no_checksum(self): 
self.assertEqual(self.irods.collections.exists(self.zone_path), False) self.assertEqual(self.irods.data_objects.exists(sample_obj_path), True) self.assertEqual( - self.irods.data_objects.exists(sample_obj_path + '.md5'), - True, + self.irods.data_objects.exists(sample_obj_path + '.md5'), True ) obj = self.irods.data_objects.get(sample_obj_path) # Reload object self.assertIsNotNone(obj.replicas[0].checksum) @@ -821,8 +813,7 @@ def test_move_coll_exists(self): sample_obj_path = os.path.join(self.sample_path, COLL_NAME, OBJ_NAME) self.assertEqual(self.irods.data_objects.exists(sample_obj_path), True) self.assertEqual( - self.irods.data_objects.exists(sample_obj_path + '.md5'), - True, + self.irods.data_objects.exists(sample_obj_path + '.md5'), True ) def test_move_obj_exists(self): @@ -958,8 +949,7 @@ def test_revert(self): sample_obj_path = os.path.join(self.sample_path, COLL_NAME, OBJ_NAME) self.assertEqual(self.irods.data_objects.exists(sample_obj_path), False) self.assertEqual( - self.irods.data_objects.exists(sample_obj_path + '.md5'), - False, + self.irods.data_objects.exists(sample_obj_path + '.md5'), False ) self.assert_irods_access( self.user.username, zone_coll, IRODS_ACCESS_OWN @@ -1006,8 +996,7 @@ def test_move_restrict(self): ) self.assertEqual(self.irods.data_objects.exists(sample_obj_path), False) self.assertEqual( - self.irods.data_objects.exists(sample_obj_path + '.md5'), - False, + self.irods.data_objects.exists(sample_obj_path + '.md5'), False ) flow_data = {'zone_uuid': str(new_zone.sodar_uuid)} @@ -1031,18 +1020,13 @@ def test_move_restrict(self): ) self.assertEqual(self.irods.data_objects.exists(sample_obj_path), True) self.assertEqual( - self.irods.data_objects.exists(sample_obj_path + '.md5'), - True, + self.irods.data_objects.exists(sample_obj_path + '.md5'), True ) self.assert_irods_access( - self.group_name, - sample_obj_path, - self.irods_access_read, + self.group_name, sample_obj_path, self.irods_access_read ) self.assert_irods_access( - self.group_name, - sample_obj_path + '.md5', - self.irods_access_read, + self.group_name, sample_obj_path + '.md5', self.irods_access_read ) @@ -1117,8 +1101,7 @@ def test_move_alt_root_path(self): ) self.assertEqual(self.irods.data_objects.exists(sample_obj_path), False) self.assertEqual( - self.irods.data_objects.exists(sample_obj_path + '.md5'), - False, + self.irods.data_objects.exists(sample_obj_path + '.md5'), False ) flow_data = {'zone_uuid': str(self.zone.sodar_uuid)} @@ -1140,18 +1123,13 @@ def test_move_alt_root_path(self): ) self.assertEqual(self.irods.data_objects.exists(sample_obj_path), True) self.assertEqual( - self.irods.data_objects.exists(sample_obj_path + '.md5'), - True, + self.irods.data_objects.exists(sample_obj_path + '.md5'), True ) self.assert_irods_access( - self.group_name, - sample_obj_path, - self.irods_access_read, + self.group_name, sample_obj_path, self.irods_access_read ) self.assert_irods_access( - self.group_name, - sample_obj_path + '.md5', - self.irods_access_read, + self.group_name, sample_obj_path + '.md5', self.irods_access_read ) @@ -1200,12 +1178,10 @@ def test_create(self): self.irods_backend.get_path(project) ) self.assertEqual( - project_coll.metadata.get_one('title').value, - project.title, + project_coll.metadata.get_one('title').value, project.title ) self.assertEqual( - project_coll.metadata.get_one('description').value, - META_EMPTY_VALUE, + project_coll.metadata.get_one('description').value, META_EMPTY_VALUE ) self.assertEqual( project_coll.metadata.get_one('parent_uuid').value, 
@@ -1234,12 +1210,10 @@ def test_update_metadata(self): self.assertNotEqual(self.project.description, UPDATED_DESC) project_coll = self.irods.collections.get(self.project_path) self.assertEqual( - project_coll.metadata.get_one('title').value, - self.project.title, + project_coll.metadata.get_one('title').value, self.project.title ) self.assertEqual( - project_coll.metadata.get_one('description').value, - META_EMPTY_VALUE, + project_coll.metadata.get_one('description').value, META_EMPTY_VALUE ) self.project.title = UPDATED_TITLE @@ -1256,12 +1230,10 @@ def test_update_metadata(self): project_coll = self.irods.collections.get(self.project_path) self.assertEqual( - project_coll.metadata.get_one('title').value, - UPDATED_TITLE, + project_coll.metadata.get_one('title').value, UPDATED_TITLE ) self.assertEqual( - project_coll.metadata.get_one('description').value, - UPDATED_DESC, + project_coll.metadata.get_one('description').value, UPDATED_DESC ) def test_update_parent(self): @@ -1339,7 +1311,10 @@ def test_enable_access(self): ) self.assert_irods_access(PUBLIC_GROUP, self.sample_path, None) - flow_data = {'path': self.sample_path, 'access': True} + flow_data = { + 'path': self.sample_path, + 'access': True, + } flow = self.taskflow.get_flow( irods_backend=self.irods_backend, project=self.project, @@ -1370,7 +1345,10 @@ def test_disable_access(self): PUBLIC_GROUP, self.sample_path, self.irods_access_read ) - flow_data = {'path': self.sample_path, 'access': False} + flow_data = { + 'path': self.sample_path, + 'access': False, + } flow = self.taskflow.get_flow( irods_backend=self.irods_backend, project=self.project, @@ -1393,7 +1371,10 @@ def test_revert(self): ) self.assert_irods_access(PUBLIC_GROUP, self.sample_path, None) - flow_data = {'path': self.sample_path, 'access': True} + flow_data = { + 'path': self.sample_path, + 'access': True, + } flow = self.taskflow.get_flow( irods_backend=self.irods_backend, project=self.project, @@ -1834,12 +1815,11 @@ def test_delete(self): project=self.project, active=True ).first() ) - flow_data = {} flow = self.taskflow.get_flow( irods_backend=self.irods_backend, project=self.project, flow_name='sheet_delete', - flow_data=flow_data, + flow_data={}, ) self.assertEqual(type(flow), SheetDeleteFlow) self._build_and_run(flow) @@ -1859,12 +1839,11 @@ def test_delete_colls(self): self.make_irods_colls(self.investigation) self.assertEqual(self.irods.collections.exists(self.sample_path), True) - flow_data = {} flow = self.taskflow.get_flow( irods_backend=self.irods_backend, project=self.project, flow_name='sheet_delete', - flow_data=flow_data, + flow_data={}, ) self.assertEqual(type(flow), SheetDeleteFlow) self._build_and_run(flow) @@ -1898,12 +1877,11 @@ def test_delete_zone(self): zone_path = self.irods_backend.get_path(zone) self.assertEqual(self.irods.collections.exists(zone_path), True) - flow_data = {} flow = self.taskflow.get_flow( irods_backend=self.irods_backend, project=self.project, flow_name='sheet_delete', - flow_data=flow_data, + flow_data={}, ) self.assertEqual(type(flow), SheetDeleteFlow) self._build_and_run(flow)
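
The reverse proxy patch in this series (#1917) only adds the Django settings hooks; the series itself does not show how a deployment would populate them. Below is a minimal, hypothetical sketch (not part of the patches) of how the new environment variables could be set and consumed with django-environ, mirroring the exact calls added in ``config/settings/base.py`` and ``config/settings/production.py``. The hostnames are placeholders, not values from the patches.

.. code-block:: python

    import os
    import environ

    # Example values for a deployment behind a reverse proxy (placeholders)
    os.environ.setdefault('DJANGO_USE_X_FORWARDED_HOST', '1')
    os.environ.setdefault('DJANGO_CSRF_TRUSTED_ORIGINS', 'https://sodar.example.com')
    os.environ.setdefault('DJANGO_CSRF_COOKIE_DOMAIN', 'sodar.example.com')

    env = environ.Env()
    # Same calls as the settings lines added in the reverse proxy patch
    USE_X_FORWARDED_HOST = env.bool('DJANGO_USE_X_FORWARDED_HOST', False)
    CSRF_TRUSTED_ORIGINS = env.list('DJANGO_CSRF_TRUSTED_ORIGINS', default=[])
    CSRF_COOKIE_DOMAIN = env.str('DJANGO_CSRF_COOKIE_DOMAIN', None)

    print(USE_X_FORWARDED_HOST, CSRF_TRUSTED_ORIGINS, CSRF_COOKIE_DOMAIN)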