`.
+v0.14.2 (2024-03-15)
+====================
+
+Release for minor updates, maintenance and bug fixes.
+
+- Add CRAM file support for cancer/germline study links and IGV sessions
+- Add path glob pattern support for IGV session BAM/VCF omit settings
+- Add assay plugin shortcut collection creation in landing zones
+- Add iRODS data request details in timeline events
+- Add landing zone statistics in siteinfo
+- Fix iRODS data request issues
+- Fix IRODS_ROOT_PATH issues
+- Fix LDAP TLS settings
+- Fix iRODS stats badge stuck in "updating"
+- Fix landing zone status updating not working in project details card
+- Fix landing zone creation crash with large number of created collections
+- Fix multiple sheet editor issues
+- Minor updates and bug fixes
+- Upgrade to SODAR Core v0.13.4
+
+Migration Guide
+---------------
+
+CRAM File Support
+ This release adds support for CRAM files. They are linked in studies and IGV
+ sessions similar to BAM files. If your project contains CRAM files uploaded
+ prior to this release, you will have to run :guilabel:`Update Sheet Cache`
+ in the Sample Sheets app to enable the files in study links and generated
+ IGV sessions. Alternatively, an administrator can run the ``synccache``
+ management command to update all projects or a specific project.
+
+
v0.14.1 (2023-12-12)
====================
diff --git a/docs_manual/source/ui_project_update.rst b/docs_manual/source/ui_project_update.rst
index 7161e9702..24f0ab5b4 100644
--- a/docs_manual/source/ui_project_update.rst
+++ b/docs_manual/source/ui_project_update.rst
@@ -53,12 +53,22 @@ Token for Sheet Synchronization
IGV session genome
Genome used in generating IGV session files for the project. The name needs
to be in a format accepted by IGV. Affects cancer and germline projects.
-BAM files to omit from IGV sessions
- Comma separated list of BAM file suffixes to omit from IGV sessions.
- Overrides site-wide setting, affects cancer and germline projects.
-VCF files to omit from IGV sessions
- Comma separated list of VCF file suffixes to omit from IGV sessions.
- Overrides site-wide setting, affects cancer and germline projects.
+BAM and CRAM paths to omit from IGV sessions
+ Comma-separated list of iRODS path glob patterns for omitting BAM and CRAM
+ files from IGV sessions and study shortcuts. Overrides site-wide setting,
+ affects cancer and germline projects. You can input paths to subcollections
+ under project sample data as well as file names. Using wildcards like ``*``
+ and ``?`` is permitted. Providing full iRODS paths is not necessary: each
+ pattern is assumed to start with a ``*`` wildcard. An empty setting means no
+ files or collections are excluded. You need to run
+ :guilabel:`Update Sheet Cache` in the Sample Sheets app for changes
+ to take effect.
+VCF paths to omit from IGV sessions
+ Comma-separated list of iRODS path glob patterns for omitting VCF files from
+ IGV sessions and study shortcuts. Overrides site-wide setting, affects
+ cancer and germline projects. Behaves similarly to the related BAM/CRAM
+ setting. You need to run :guilabel:`Update Sheet Cache` in the Sample Sheets
+ app for changes to take effect.
IP Restrict
Restrict project access to specific IP addresses if this is set.
IP Allow List
diff --git a/irodsbackend/api.py b/irodsbackend/api.py
index 34ebb994b..0314f3191 100644
--- a/irodsbackend/api.py
+++ b/irodsbackend/api.py
@@ -194,7 +194,8 @@ def sanitize_path(cls, path):
"""
Validate and sanitize iRODS path.
- :param path: iRODS collection or data object path (string)
+ :param path: Full or partial iRODS path to collection or data object
+ (string)
:raise: ValueError if iRODS path is invalid or unacceptable
:return: Sanitized iRODS path (string)
"""
@@ -261,7 +262,6 @@ def get_path(cls, obj):
obj_class, ', '.join(ACCEPTED_PATH_TYPES)
)
)
-
if obj_class == 'Project':
project = obj
else:
@@ -270,10 +270,8 @@ def get_path(cls, obj):
raise ValueError('Project not found for given object')
# Base path (project)
- rp = settings.IRODS_ROOT_PATH
- path = '/{zone}/projects/{root_prefix}{uuid_prefix}/{uuid}'.format(
- root_prefix=rp + '/' if rp else '',
- zone=settings.IRODS_ZONE,
+ path = '{root_path}/projects/{uuid_prefix}/{uuid}'.format(
+ root_path=cls.get_root_path(),
uuid_prefix=str(project.sodar_uuid)[:2],
uuid=project.sodar_uuid,
)
@@ -341,11 +339,16 @@ def get_zone_path(cls, project):
@classmethod
def get_root_path(cls):
- """Return the root path for SODAR data"""
- root_prefix = (
- '/' + settings.IRODS_ROOT_PATH if settings.IRODS_ROOT_PATH else ''
- )
- return '/{}{}'.format(settings.IRODS_ZONE, root_prefix)
+ """Return the SODAR root path in iRODS"""
+ irods_zone = settings.IRODS_ZONE
+ root_path = ''
+ if settings.IRODS_ROOT_PATH:
+ root_path = cls.sanitize_path(settings.IRODS_ROOT_PATH)
+ if root_path.startswith('/' + irods_zone):
+ raise ValueError(
+ 'iRODS zone must not be included in IRODS_ROOT_PATH'
+ )
+ return '/{}{}'.format(irods_zone, root_path)
@classmethod
def get_projects_path(cls):
@@ -368,13 +371,9 @@ def get_uuid_from_path(cls, path, obj_type):
:return: String or None
:raise: ValueError if obj_type is not accepted
"""
- root_prefix = (
- settings.IRODS_ROOT_PATH + '/' if settings.IRODS_ROOT_PATH else ''
- )
path_regex = {
- 'project': '/{}/'.format(settings.IRODS_ZONE)
- + root_prefix
- + 'projects/[a-zA-Z0-9]{2}/(.+?)(?:/|$)',
+ 'project': cls.get_root_path()
+ + '/projects/[a-zA-Z0-9]{2}/(.+?)(?:/|$)',
'study': '/study_(.+?)(?:/|$)',
'assay': '/assay_(.+?)(?:/|$)',
}
@@ -582,7 +581,7 @@ def get_colls_recursively(cls, coll):
return [iRODSCollection(coll.manager, row) for row in query]
def get_objs_recursively(
- self, irods, coll, md5=False, name_like=None, limit=None
+ self, irods, coll, include_md5=False, name_like=None, limit=None
):
"""
Return objects below a coll recursively. Replacement for the
@@ -591,13 +590,13 @@ def get_objs_recursively(
:param irods: iRODSSession object
:param coll: Collection object
- :param md5: if True, return .md5 files, otherwise anything but them
+ :param include_md5: if True, include .md5 files
:param name_like: Filtering of file names (string or list of strings)
- :param limit: Limit search to n rows (int)
+ :param limit: Limit retrieval to n rows (int)
:return: List
"""
ret = []
- md5_filter = 'LIKE' if md5 else 'NOT LIKE'
+ md5_filter = '' if include_md5 else 'AND data_name NOT LIKE \'%.md5\''
path_lookup = []
q_count = 1
@@ -607,8 +606,7 @@ def _do_query(irods, nl=None):
'r_data_main.modify_ts as modify_ts, coll_name '
'FROM r_data_main JOIN r_coll_main USING (coll_id) '
'WHERE (coll_name = \'{coll_path}\' '
- 'OR coll_name LIKE \'{coll_path}/%\') '
- 'AND data_name {md5_filter} \'%.md5\''.format(
+ 'OR coll_name LIKE \'{coll_path}/%\') {md5_filter}'.format(
coll_path=coll.path, md5_filter=md5_filter
)
)
@@ -621,7 +619,8 @@ def _do_query(irods, nl=None):
sql += ' OR '
sql += 'data_name LIKE \'%{}%\''.format(n)
sql += ')'
- if not md5 and limit:
+ # TODO: Shouldn't we also allow limit if including .md5 files?
+ if not include_md5 and limit:
sql += ' LIMIT {}'.format(limit)
# logger.debug('Object list query = "{}"'.format(sql))
@@ -680,21 +679,21 @@ def get_objects(
self,
irods,
path,
- check_md5=False,
+ include_md5=False,
include_colls=False,
name_like=None,
limit=None,
):
"""
- Return an iRODS object list.
+ Return a flat iRODS object list recursively under a given path.
:param irods: iRODSSession object
:param path: Full path to iRODS collection
- :param check_md5: Whether to add md5 checksum file info (bool)
+ :param include_md5: Include .md5 checksum files (bool)
:param include_colls: Include collections (bool)
:param name_like: Filtering of file names (string or list of strings)
:param limit: Limit search to n rows (int)
- :return: Dict
+ :return: List
:raise: FileNotFoundError if collection is not found
"""
try:
@@ -706,43 +705,21 @@ def get_objects(
if not isinstance(name_like, list):
name_like = [name_like]
name_like = [n.replace('_', '\_') for n in name_like] # noqa
- ret = {'irods_data': []}
- md5_paths = None
-
- data_objs = self.get_objs_recursively(
- irods, coll, name_like=name_like, limit=limit
+ ret = self.get_objs_recursively(
+ irods,
+ coll,
+ include_md5=include_md5,
+ name_like=name_like,
+ limit=limit,
)
- if data_objs and check_md5:
- md5_paths = [
- o['path']
- for o in self.get_objs_recursively(
- irods, coll, md5=True, name_like=name_like
- )
- ]
-
- for o in data_objs:
- if check_md5:
- if o['path'] + '.md5' in md5_paths:
- o['md5_file'] = True
- else:
- o['md5_file'] = False
- ret['irods_data'].append(o)
# Add collections if enabled
- # TODO: Combine into a single query?
+ # TODO: Combine into a single query? (see issues #1440, #1883)
if include_colls:
colls = self.get_colls_recursively(coll)
for c in colls:
- ret['irods_data'].append(
- {
- 'name': c.name,
- 'type': 'coll',
- 'path': c.path,
- }
- )
- ret['irods_data'] = sorted(
- ret['irods_data'], key=lambda x: x['path']
- )
+ ret.append({'name': c.name, 'type': 'coll', 'path': c.path})
+ ret = sorted(ret, key=lambda x: x['path'])
return ret
@classmethod
diff --git a/irodsbackend/static/irodsbackend/js/irodsbackend.js b/irodsbackend/static/irodsbackend/js/irodsbackend.js
index 7a287f7c9..f4f9deaaf 100644
--- a/irodsbackend/static/irodsbackend/js/irodsbackend.js
+++ b/irodsbackend/static/irodsbackend/js/irodsbackend.js
@@ -2,54 +2,48 @@
Collection statistics updating function
***************************************/
var updateCollectionStats = function() {
- var statsPaths = [];
- var projectUUID = null;
+ var paths = {};
$('span.sodar-irods-stats').each(function () {
- var currentPath = decodeURIComponent($(this).attr(
- 'data-stats-url').split('=')[1]);
- if (!projectUUID) {
- projectUUID = currentPath.split('/')[4];
+ var projectUUID = $(this).attr('data-project-uuid');
+ if (!paths.hasOwnProperty(projectUUID)) {
+ paths[projectUUID] = [];
}
- if (!(statsPaths.includes(currentPath))){
- statsPaths.push(currentPath);
+ var currentPath = $(this).attr('data-stats-path');
+ if (!(paths[projectUUID].includes(currentPath))) {
+ paths[projectUUID].push(currentPath);
}
});
- var d = {paths: statsPaths};
-
- if (projectUUID) {
+ $.each(paths, function(projectUUID, paths) {
$.ajax({
url: '/irodsbackend/ajax/stats/' + projectUUID,
method: 'POST',
dataType: 'json',
- data: d,
+ data: {paths: paths},
contentType: "application/x-www-form-urlencoded; charset=UTF-8",
traditional: true
}).done(function (data) {
$('span.sodar-irods-stats').each(function () {
var statsSpan = $(this);
- var irodsPath = decodeURIComponent(statsSpan.attr(
- 'data-stats-url')).split('=')[1];
- var fileSuffix = 's';
- for (idx in data['coll_objects']) {
- if (data['coll_objects'].hasOwnProperty(idx)) {
- if (irodsPath === data['coll_objects'][idx]['path']
- && data['coll_objects'][idx]['status'] === '200') {
- var stats = data['coll_objects'][idx]['stats'];
- if (stats['file_count'] === 1) {
- fileSuffix = '';
- }
- statsSpan.text(
- stats['file_count'] + ' file' + fileSuffix +
- ' ('+ humanFileSize(stats['total_size'], true) + ')');
- break;
- }
- }
+ var path = statsSpan.attr('data-stats-path');
+ var s = 's';
+ if (path in data['irods_stats']) {
+ var status = data['irods_stats'][path]['status'];
+ if (status === 200) {
+ var fileCount = data['irods_stats'][path]['file_count'];
+ var totalSize = data['irods_stats'][path]['total_size'];
+ if (fileCount === 1) s = '';
+ statsSpan.text(
+ fileCount + ' file' + s +
+ ' (' + humanFileSize(totalSize, true) + ')');
+ } else if (status === 404) statsSpan.text('Not found');
+ else if (status === 403) statsSpan.text('Denied');
+ else statsSpan.text('Error');
}
});
});
- }
+ });
};
/***************************************
diff --git a/irodsbackend/templatetags/irodsbackend_tags.py b/irodsbackend/templatetags/irodsbackend_tags.py
index 81ce78361..ba95f84f3 100644
--- a/irodsbackend/templatetags/irodsbackend_tags.py
+++ b/irodsbackend/templatetags/irodsbackend_tags.py
@@ -1,6 +1,4 @@
from django import template
-from django.urls import reverse
-from django.utils.http import urlencode
from irodsbackend.api import IrodsAPI
@@ -26,18 +24,11 @@ def get_stats_html(irods_path, project):
:param project: Project object
:return: String (contains HTML)
"""
- url_kwargs = {'project': str(project.sodar_uuid)}
- query_string = {'path': irods_path}
- url = (
- reverse('irodsbackend:stats', kwargs=url_kwargs)
- + '?'
- + urlencode(query_string)
- )
return (
''
+ 'data-stats-path="{}" data-project-uuid="{}">'
' Updating..'
- ''.format(url)
+ ''.format(irods_path, project.sodar_uuid)
)
diff --git a/irodsbackend/tests/test_api.py b/irodsbackend/tests/test_api.py
index 01db8615c..553da917e 100644
--- a/irodsbackend/tests/test_api.py
+++ b/irodsbackend/tests/test_api.py
@@ -41,6 +41,7 @@
ZONE_DESC = 'description'
IRODS_ZONE = settings.IRODS_ZONE
IRODS_ROOT_PATH = 'sodar/root'
+IRODS_ROOT_PATH_INVALID = 'sodar/../root'
SAMPLE_COLL = settings.IRODS_SAMPLE_COLL
LANDING_ZONE_COLL = settings.IRODS_LANDING_ZONE_COLL
IRODS_ENV = {
@@ -142,9 +143,9 @@ def test_get_path_project(self):
self.assertEqual(expected, path)
@override_settings(IRODS_ROOT_PATH=IRODS_ROOT_PATH)
- def test_get_path_project_root_path(self):
+ def test_get_path_project_root(self):
"""Test get_irods_path() with Project object and root path"""
- expected = '/{zone}/projects/{root_path}/{uuid_prefix}/{uuid}'.format(
+ expected = '/{zone}/{root_path}/projects/{uuid_prefix}/{uuid}'.format(
zone=IRODS_ZONE,
root_path=IRODS_ROOT_PATH,
uuid_prefix=str(self.project.sodar_uuid)[:2],
@@ -230,6 +231,24 @@ def test_get_root_path_with_path(self):
expected = '/{}/{}'.format(IRODS_ZONE, IRODS_ROOT_PATH)
self.assertEqual(self.irods_backend.get_root_path(), expected)
+ @override_settings(IRODS_ROOT_PATH=IRODS_ROOT_PATH + '/')
+ def test_get_root_path_trailing_slash(self):
+ """Test get_root_path() with root path setting and trailing slash"""
+ expected = '/{}/{}'.format(IRODS_ZONE, IRODS_ROOT_PATH)
+ self.assertEqual(self.irods_backend.get_root_path(), expected)
+
+ @override_settings(IRODS_ROOT_PATH=IRODS_ROOT_PATH_INVALID)
+ def test_get_root_path_invalid(self):
+ """Test get_root_path() with invalid root path"""
+ with self.assertRaises(ValueError):
+ self.irods_backend.get_root_path()
+
+ @override_settings(IRODS_ROOT_PATH=IRODS_ROOT_PATH_INVALID)
+ def test_get_root_path_with_zone(self):
+ """Test get_root_path() with value containing the zone"""
+ with self.assertRaises(ValueError):
+ self.irods_backend.get_root_path()
+
def test_get_projects_path(self):
"""Test get_projects_path() with default settings"""
expected = '/{}/projects'.format(IRODS_ZONE)
@@ -262,7 +281,6 @@ def test_get_uuid_from_path_project(self):
def test_get_uuid_from_path_wrong_type(self):
"""Test get_uuid_from_path() with invalid type (should fail)"""
path = self.irods_backend.get_path(self.study)
-
with self.assertRaises(ValueError):
self.irods_backend.get_uuid_from_path(path, 'investigation')
diff --git a/irodsbackend/tests/test_api_taskflow.py b/irodsbackend/tests/test_api_taskflow.py
index 3282796f8..4a138f0d6 100644
--- a/irodsbackend/tests/test_api_taskflow.py
+++ b/irodsbackend/tests/test_api_taskflow.py
@@ -103,18 +103,17 @@ def test_get_objects(self):
self.irods.data_objects.create(path + '/' + TEST_FILE_NAME)
self.irods.data_objects.create(path + '/{}.md5'.format(TEST_FILE_NAME))
obj_list = self.irods_backend.get_objects(
- self.irods, path, check_md5=True
+ self.irods, path, include_md5=True
)
self.assertIsNotNone(obj_list)
- self.assertEqual(len(obj_list['irods_data']), 1) # md5 not listed
+ self.assertEqual(len(obj_list), 2)
- obj = obj_list['irods_data'][0]
+ obj = obj_list[0]
expected = {
'name': TEST_FILE_NAME,
'type': 'obj',
'path': path + '/' + TEST_FILE_NAME,
'size': 0,
- 'md5_file': True,
'modify_time': obj['modify_time'],
}
self.assertEqual(obj, expected)
@@ -127,10 +126,10 @@ def test_get_objects_with_colls(self):
self.irods.data_objects.create(path + '/{}.md5'.format(TEST_FILE_NAME))
self.irods.collections.create(path + '/subcoll')
obj_list = self.irods_backend.get_objects(
- self.irods, path, include_colls=True, check_md5=True
+ self.irods, path, include_md5=True, include_colls=True
)
self.assertIsNotNone(obj_list)
- self.assertEqual(len(obj_list['irods_data']), 2) # md5 not listed
+ self.assertEqual(len(obj_list), 3)
expected = [
{
@@ -143,11 +142,17 @@ def test_get_objects_with_colls(self):
'type': 'obj',
'path': path + '/' + TEST_FILE_NAME,
'size': 0,
- 'md5_file': True,
- 'modify_time': obj_list['irods_data'][1]['modify_time'],
+ 'modify_time': obj_list[1]['modify_time'],
+ },
+ {
+ 'name': TEST_FILE_NAME + '.md5',
+ 'type': 'obj',
+ 'path': path + '/' + TEST_FILE_NAME + '.md5',
+ 'size': 0,
+ 'modify_time': obj_list[2]['modify_time'],
},
]
- self.assertEqual(obj_list['irods_data'], expected)
+ self.assertEqual(obj_list, expected)
def test_get_objects_multi(self):
"""Test get_objects() with multiple search terms"""
@@ -161,30 +166,10 @@ def test_get_objects_multi(self):
self.irods,
path,
name_like=[TEST_FILE_NAME, TEST_FILE_NAME2],
- check_md5=True,
+ include_md5=True,
)
self.assertIsNotNone(obj_list)
- self.assertEqual(len(obj_list['irods_data']), 2) # md5 not listed
-
- expected = [
- {
- 'name': TEST_FILE_NAME,
- 'type': 'obj',
- 'path': path + '/' + TEST_FILE_NAME,
- 'size': 0,
- 'md5_file': True,
- 'modify_time': obj_list['irods_data'][0]['modify_time'],
- },
- {
- 'name': TEST_FILE_NAME2,
- 'type': 'obj',
- 'path': path + '/' + TEST_FILE_NAME2,
- 'size': 0,
- 'md5_file': True,
- 'modify_time': obj_list['irods_data'][1]['modify_time'],
- },
- ]
- self.assertEqual(obj_list['irods_data'], expected)
+ self.assertEqual(len(obj_list), 4)
def test_get_objects_long_query(self):
"""Test get_objects() with a long query"""
@@ -212,30 +197,10 @@ def test_get_objects_long_query(self):
self.irods,
path,
name_like=[TEST_FILE_NAME, TEST_FILE_NAME2],
- check_md5=True,
+ include_md5=True,
)
self.assertIsNotNone(obj_list)
- self.assertEqual(len(obj_list['irods_data']), 2) # md5 not listed
-
- expected = [
- {
- 'name': TEST_FILE_NAME,
- 'type': 'obj',
- 'path': path + '/' + TEST_FILE_NAME,
- 'size': 0,
- 'md5_file': True,
- 'modify_time': obj_list['irods_data'][0]['modify_time'],
- },
- {
- 'name': TEST_FILE_NAME2,
- 'type': 'obj',
- 'path': path + '/' + TEST_FILE_NAME2,
- 'size': 0,
- 'md5_file': True,
- 'modify_time': obj_list['irods_data'][1]['modify_time'],
- },
- ]
- self.assertEqual(obj_list['irods_data'], expected)
+ self.assertEqual(len(obj_list), 4)
def test_get_objects_empty_coll(self):
"""Test get_objects() with an empty sample collection"""
@@ -243,7 +208,7 @@ def test_get_objects_empty_coll(self):
path = self.irods_backend.get_path(self.project) + '/' + SAMPLE_COLL
obj_list = self.irods_backend.get_objects(self.irods, path)
self.assertIsNotNone(obj_list)
- self.assertEqual(len(obj_list['irods_data']), 0)
+ self.assertEqual(len(obj_list), 0)
def test_get_objects_no_coll(self):
"""Test get_objects() with no created collections"""
@@ -258,10 +223,10 @@ def test_get_objects_limit(self):
self.irods.data_objects.create(path + '/' + TEST_FILE_NAME)
self.irods.data_objects.create(path + '/' + TEST_FILE_NAME2)
obj_list = self.irods_backend.get_objects(
- self.irods, path, check_md5=False, limit=1
+ self.irods, path, include_md5=False, limit=1
)
self.assertIsNotNone(obj_list)
- self.assertEqual(len(obj_list['irods_data']), 1) # Limited to 1
+ self.assertEqual(len(obj_list), 1) # Limited to 1
def test_issue_ticket(self):
"""Test issue_ticket()"""
diff --git a/irodsbackend/tests/test_views.py b/irodsbackend/tests/test_views.py
index 945762603..175e295e7 100644
--- a/irodsbackend/tests/test_views.py
+++ b/irodsbackend/tests/test_views.py
@@ -1,430 +1,18 @@
"""Tests for views in the irodsbackend app"""
import base64
-import os
-from irods.test.helpers import make_object
-
-from django.conf import settings
-from django.test import RequestFactory, override_settings
+from django.test import override_settings
from django.urls import reverse
from test_plus.test import TestCase
-# Projectroles dependency
-from projectroles.models import SODAR_CONSTANTS
-
-# Taskflowbackend dependency
-from taskflowbackend.tests.base import TaskflowViewTestBase
-
-
-# SODAR constants
-PROJECT_ROLE_OWNER = SODAR_CONSTANTS['PROJECT_ROLE_OWNER']
-PROJECT_ROLE_DELEGATE = SODAR_CONSTANTS['PROJECT_ROLE_DELEGATE']
-PROJECT_ROLE_CONTRIBUTOR = SODAR_CONSTANTS['PROJECT_ROLE_CONTRIBUTOR']
-PROJECT_ROLE_GUEST = SODAR_CONSTANTS['PROJECT_ROLE_GUEST']
-PROJECT_TYPE_CATEGORY = SODAR_CONSTANTS['PROJECT_TYPE_CATEGORY']
-PROJECT_TYPE_PROJECT = SODAR_CONSTANTS['PROJECT_TYPE_PROJECT']
# Local constants
-IRODS_TEMP_COLL = 'temp'
-IRODS_OBJ_SIZE = 1024
-IRODS_OBJ_CONTENT = ''.join('x' for _ in range(IRODS_OBJ_SIZE))
-IRODS_OBJ_NAME = 'test1.txt'
-IRODS_MD5_NAME = 'test1.txt.md5'
-IRODS_NON_PROJECT_PATH = (
- '/' + settings.IRODS_ZONE + '/home/' + settings.IRODS_USER
-)
-IRODS_FAIL_COLL = 'xeiJ1Vie'
LOCAL_USER_NAME = 'local_user'
LOCAL_USER_PASS = 'password'
-class IrodsbackendViewTestBase(TaskflowViewTestBase):
- """Base class for irodsbackend UI view testing"""
-
- def setUp(self):
- super().setUp()
- self.req_factory = RequestFactory()
- # Init project with owner in taskflow
- self.project, self.owner_as = self.make_project_taskflow(
- 'TestProject', PROJECT_TYPE_PROJECT, self.category, self.user
- )
- # Create test collection in iRODS
- self.project_path = self.irods_backend.get_path(self.project)
- self.irods_path = os.path.join(self.project_path, IRODS_TEMP_COLL)
- self.irods_coll = self.irods.collections.create(self.irods_path)
-
-
-class TestIrodsStatisticsAjaxView(IrodsbackendViewTestBase):
- """Tests for the landing zone collection statistics Ajax view"""
-
- def setUp(self):
- super().setUp()
- self.post_url = self.irods_backend.get_url(
- view='stats', project=self.project, method='POST'
- )
-
- def test_get_empty_coll(self):
- """Test GET request for stats on empty iRODS collection"""
- with self.login(self.user):
- response = self.client.get(
- self.irods_backend.get_url(
- view='stats', project=self.project, path=self.irods_path
- )
- )
- self.assertEqual(response.status_code, 200)
- self.assertEqual(response.data['file_count'], 0)
- self.assertEqual(response.data['total_size'], 0)
-
- def test_get_invalid_coll(self):
- """Test GET request with invalid collection (should fail)"""
- with self.login(self.user):
- response = self.client.get(
- self.irods_backend.get_url(
- view='stats', project=self.project, path=self.irods_path
- )
- + '%2F..'
- )
- self.assertEqual(response.status_code, 400)
-
- def test_get_coll_obj(self):
- """Test GET for stats on collection with data object"""
- # Put data object in iRODS
- obj_path = self.irods_path + '/' + IRODS_OBJ_NAME
- make_object(self.irods, obj_path, IRODS_OBJ_CONTENT)
- with self.login(self.user):
- response = self.client.get(
- self.irods_backend.get_url(
- view='stats', project=self.project, path=self.irods_path
- )
- )
- self.assertEqual(response.status_code, 200)
- self.assertEqual(response.data['file_count'], 1)
- self.assertEqual(response.data['total_size'], IRODS_OBJ_SIZE)
-
- def test_get_coll_md5(self):
- """Test GET for stats on collection with data object and md5"""
- # Put data object in iRODS
- obj_path = self.irods_path + '/' + IRODS_OBJ_NAME
- make_object(self.irods, obj_path, IRODS_OBJ_CONTENT)
- # Put MD5 data object in iRODS
- md5_path = self.irods_path + '/' + IRODS_MD5_NAME
- make_object(self.irods, md5_path, IRODS_OBJ_CONTENT) # Not actual md5
- with self.login(self.user):
- response = self.client.get(
- self.irods_backend.get_url(
- view='stats', project=self.project, path=self.irods_path
- )
- )
- self.assertEqual(response.status_code, 200)
- self.assertEqual(response.data['file_count'], 1) # md5 not counted
- self.assertEqual(response.data['total_size'], IRODS_OBJ_SIZE)
-
- def test_get_coll_not_found(self):
- """Test GET for stats on non-existing collection"""
- fail_path = self.irods_path + '/' + IRODS_FAIL_COLL
- self.assertEqual(self.irods.collections.exists(fail_path), False)
- with self.login(self.user):
- response = self.client.get(
- self.irods_backend.get_url(
- view='stats', project=self.project, path=fail_path
- )
- )
- self.assertEqual(response.status_code, 404)
-
- def test_get_coll_not_in_project(self):
- """Test GET for stats on collection not belonging to project"""
- self.assertEqual(
- self.irods.collections.exists(IRODS_NON_PROJECT_PATH), True
- )
- with self.login(self.user):
- response = self.client.get(
- self.irods_backend.get_url(
- view='stats',
- project=self.project,
- path=IRODS_NON_PROJECT_PATH,
- )
- )
- self.assertEqual(response.status_code, 400)
-
- def test_get_no_access(self):
- """Test GET for stats with no access for iRODS collection"""
- new_user = self.make_user('new_user')
- self.make_assignment(
- self.project, new_user, self.role_contributor
- ) # No taskflow
- with self.login(new_user):
- response = self.client.get(
- self.irods_backend.get_url(
- view='stats', project=self.project, path=self.irods_path
- )
- )
- self.assertEqual(response.status_code, 403)
-
- def test_post_empty_coll_stats(self):
- """Test POST for batch stats on empty iRODS collections"""
- post_data = {
- 'paths': [self.irods_path, self.irods_path],
- 'md5': ['0', '0'],
- }
- with self.login(self.user):
- response = self.client.post(self.post_url, post_data)
- self.assertEqual(response.status_code, 200)
- for idx in range(len(response.data['coll_objects'])):
- self.assertEqual(
- response.data['coll_objects'][idx]['status'], '200'
- )
- self.assertEqual(
- response.data['coll_objects'][idx]['stats']['file_count'], 0
- )
- self.assertEqual(
- response.data['coll_objects'][idx]['stats']['total_size'], 0
- )
-
- def test_post_coll_stats(self):
- """Test POST for batch stats on collections with data object"""
- # Put data object in iRODS
- obj_path = self.irods_path + '/' + IRODS_OBJ_NAME
- make_object(self.irods, obj_path, IRODS_OBJ_CONTENT)
- post_data = {
- 'paths': [self.irods_path, self.irods_path],
- 'md5': ['0', '0'],
- }
- with self.login(self.user):
- response = self.client.post(self.post_url, post_data)
- self.assertEqual(response.status_code, 200)
- for idx in range(len(response.data['coll_objects'])):
- self.assertEqual(
- response.data['coll_objects'][idx]['status'], '200'
- )
- self.assertEqual(
- response.data['coll_objects'][idx]['stats']['file_count'], 1
- )
- self.assertEqual(
- response.data['coll_objects'][idx]['stats']['total_size'],
- IRODS_OBJ_SIZE,
- )
-
- def test_post_coll_md5_stats(self):
- """Test POST for batch stats on collections with data object and md5"""
- # Put data object in iRODS
- obj_path = self.irods_path + '/' + IRODS_OBJ_NAME
- make_object(self.irods, obj_path, IRODS_OBJ_CONTENT)
- # Put MD5 data object in iRODS
- md5_path = self.irods_path + '/' + IRODS_MD5_NAME
- make_object(self.irods, md5_path, IRODS_OBJ_CONTENT) # Not actual md5
- post_data = {
- 'paths': [self.irods_path, self.irods_path],
- 'md5': ['1', '1'],
- }
- with self.login(self.user):
- response = self.client.post(self.post_url, post_data)
- self.assertEqual(response.status_code, 200)
- for idx in range(len(response.data['coll_objects'])):
- self.assertEqual(
- response.data['coll_objects'][idx]['status'], '200'
- )
- self.assertEqual(
- response.data['coll_objects'][idx]['stats']['file_count'], 1
- )
- self.assertEqual(
- response.data['coll_objects'][idx]['stats']['total_size'],
- IRODS_OBJ_SIZE,
- )
-
- def test_post_coll_not_found(self):
- """Test POST for stats on non-existing collections"""
- fail_path = self.irods_path + '/' + IRODS_FAIL_COLL
- self.assertEqual(self.irods.collections.exists(fail_path), False)
- post_data = {'paths': [fail_path, fail_path], 'md5': ['0', '0']}
- with self.login(self.user):
- response = self.client.post(self.post_url, post_data)
- self.assertEqual(response.status_code, 200)
- for idx in range(len(response.data['coll_objects'])):
- self.assertEqual(
- response.data['coll_objects'][idx]['status'], '404'
- )
- self.assertEqual(response.data['coll_objects'][idx]['stats'], {})
-
- def test_post_coll_not_in_project(self):
- """Test POST for stats on collections not belonging to project"""
- self.assertEqual(
- self.irods.collections.exists(IRODS_NON_PROJECT_PATH), True
- )
- post_data = {
- 'paths': [IRODS_NON_PROJECT_PATH, IRODS_NON_PROJECT_PATH],
- 'md5': ['0', '0'],
- }
- with self.login(self.user):
- response = self.client.post(self.post_url, post_data)
- self.assertEqual(response.status_code, 200)
- for idx in range(len(response.data['coll_objects'])):
- self.assertEqual(
- response.data['coll_objects'][idx]['status'], '400'
- )
- self.assertEqual(response.data['coll_objects'][idx]['stats'], {})
-
- def test_post_no_access(self):
- """Test POST for batch stats with no access for collections"""
- new_user = self.make_user('new_user')
- self.make_assignment(
- self.project, new_user, self.role_contributor
- ) # No taskflow
- post_data = {
- 'paths': [self.irods_path, self.irods_path],
- 'md5': ['0', '0'],
- }
- with self.login(new_user):
- response = self.client.post(self.post_url, post_data)
- self.assertEqual(response.status_code, 200)
- for idx in range(len(response.data['coll_objects'])):
- self.assertEqual(response.data['coll_objects'][idx]['stats'], {})
-
- def test_post_one_empty_coll(self):
- """Test POST for batch stats on one (empty) collection"""
- post_data = {'paths': [self.irods_path], 'md5': ['0']}
- with self.login(self.user):
- response = self.client.post(self.post_url, post_data)
- self.assertEqual(response.status_code, 200)
- for idx in range(len(response.data['coll_objects'])):
- self.assertEqual(
- response.data['coll_objects'][idx]['status'], '200'
- )
- self.assertEqual(
- response.data['coll_objects'][idx]['stats']['file_count'], 0
- )
- self.assertEqual(
- response.data['coll_objects'][idx]['stats']['total_size'], 0
- )
-
-
-class TestIrodsObjectListAjaxView(IrodsbackendViewTestBase):
- """Tests for the landing zone data object listing Ajax view"""
-
- def test_get_empty_coll(self):
- """Test GET for listing an empty collection in iRODS"""
- with self.login(self.user):
- response = self.client.get(
- self.irods_backend.get_url(
- view='list',
- path=self.irods_path,
- project=self.project,
- md5=0,
- )
- )
- self.assertEqual(response.status_code, 200)
- self.assertEqual(len(response.data['irods_data']), 0)
-
- def test_get_coll_obj(self):
- """Test GET for listing a collection with a data object"""
- # Put data object in iRODS
- obj_path = self.irods_path + '/' + IRODS_OBJ_NAME
- data_obj = make_object(self.irods, obj_path, IRODS_OBJ_CONTENT)
-
- with self.login(self.user):
- response = self.client.get(
- self.irods_backend.get_url(
- view='list',
- path=self.irods_path,
- project=self.project,
- md5=0,
- )
- )
- self.assertEqual(response.status_code, 200)
- self.assertEqual(len(response.data['irods_data']), 1)
-
- list_obj = response.data['irods_data'][0]
- self.assertNotIn('md5_file', list_obj)
- self.assertEqual(data_obj.name, list_obj['name'])
- self.assertEqual(data_obj.path, list_obj['path'])
- self.assertEqual(data_obj.size, IRODS_OBJ_SIZE)
-
- def test_get_coll_md5(self):
- """Test GET for listing a collection with a data object and md5"""
- obj_path = self.irods_path + '/' + IRODS_OBJ_NAME
- make_object(self.irods, obj_path, IRODS_OBJ_CONTENT)
- # Put MD5 data object in iRODS
- md5_path = self.irods_path + '/' + IRODS_MD5_NAME
- make_object(self.irods, md5_path, IRODS_OBJ_CONTENT) # Not actual md5
-
- with self.login(self.user):
- response = self.client.get(
- self.irods_backend.get_url(
- view='list',
- path=self.irods_path,
- project=self.project,
- md5=1,
- )
- )
- self.assertEqual(response.status_code, 200)
- self.assertEqual(len(response.data['irods_data']), 1) # Still 1
- self.assertEqual(response.data['irods_data'][0]['md5_file'], True)
-
- def test_get_coll_md5_no_file(self):
- """Test GET with md5 set True but no md5 file"""
- obj_path = self.irods_path + '/' + IRODS_OBJ_NAME
- make_object(self.irods, obj_path, IRODS_OBJ_CONTENT)
-
- with self.login(self.user):
- response = self.client.get(
- self.irods_backend.get_url(
- view='list',
- path=self.irods_path,
- project=self.project,
- md5=1,
- )
- )
- self.assertEqual(response.status_code, 200)
- self.assertEqual(len(response.data['irods_data']), 1)
- self.assertEqual(response.data['irods_data'][0]['md5_file'], False)
-
- def test_get_coll_not_found(self):
- """Test GET for listing a collection which doesn't exist"""
- fail_path = self.irods_path + '/' + IRODS_FAIL_COLL
- self.assertEqual(self.irods.collections.exists(fail_path), False)
- with self.login(self.user):
- response = self.client.get(
- self.irods_backend.get_url(
- view='list', path=fail_path, project=self.project, md5=0
- )
- )
- self.assertEqual(response.status_code, 404)
-
- def test_get_coll_not_in_project(self):
- """Test GET for listing a collection not belonging to project"""
- self.assertEqual(
- self.irods.collections.exists(IRODS_NON_PROJECT_PATH), True
- )
- with self.login(self.user):
- response = self.client.get(
- self.irods_backend.get_url(
- view='list',
- path=IRODS_NON_PROJECT_PATH,
- project=self.project,
- md5=0,
- )
- )
- self.assertEqual(response.status_code, 400)
-
- def test_get_no_access(self):
- """Test GET for listing with no acces for the iRODS folder"""
- new_user = self.make_user('new_user')
- self.make_assignment(
- self.project, new_user, self.role_contributor
- ) # No taskflow
- with self.login(new_user):
- response = self.client.get(
- self.irods_backend.get_url(
- view='list',
- path=self.irods_path,
- project=self.project,
- md5=0,
- )
- )
- self.assertEqual(response.status_code, 403)
-
-
class TestLocalAuthAPIView(TestCase):
"""Tests for LocalAuthAPIView"""
diff --git a/irodsbackend/tests/test_views_taskflow.py b/irodsbackend/tests/test_views_taskflow.py
new file mode 100644
index 000000000..af65ed632
--- /dev/null
+++ b/irodsbackend/tests/test_views_taskflow.py
@@ -0,0 +1,386 @@
+"""Tests for views in the irodsbackend app with taskflow enabled"""
+
+import os
+
+from irods.test.helpers import make_object
+
+from django.conf import settings
+from django.test import RequestFactory
+
+# Projectroles dependency
+from projectroles.models import SODAR_CONSTANTS
+
+# Taskflowbackend dependency
+from taskflowbackend.tests.base import TaskflowViewTestBase
+
+
+# SODAR constants
+PROJECT_ROLE_OWNER = SODAR_CONSTANTS['PROJECT_ROLE_OWNER']
+PROJECT_ROLE_DELEGATE = SODAR_CONSTANTS['PROJECT_ROLE_DELEGATE']
+PROJECT_ROLE_CONTRIBUTOR = SODAR_CONSTANTS['PROJECT_ROLE_CONTRIBUTOR']
+PROJECT_ROLE_GUEST = SODAR_CONSTANTS['PROJECT_ROLE_GUEST']
+PROJECT_TYPE_CATEGORY = SODAR_CONSTANTS['PROJECT_TYPE_CATEGORY']
+PROJECT_TYPE_PROJECT = SODAR_CONSTANTS['PROJECT_TYPE_PROJECT']
+
+# Local constants
+IRODS_TEMP_COLL = 'temp'
+IRODS_TEMP_COLL2 = 'temp2'
+IRODS_OBJ_SIZE = 1024
+IRODS_OBJ_CONTENT = ''.join('x' for _ in range(IRODS_OBJ_SIZE))
+IRODS_OBJ_NAME = 'test1.txt'
+IRODS_MD5_NAME = 'test1.txt.md5'
+IRODS_NON_PROJECT_PATH = (
+ '/' + settings.IRODS_ZONE + '/home/' + settings.IRODS_USER
+)
+IRODS_FAIL_COLL = 'xeiJ1Vie'
+
+
+class IrodsbackendViewTestBase(TaskflowViewTestBase):
+ """Base class for irodsbackend UI view testing"""
+
+ def setUp(self):
+ super().setUp()
+ self.req_factory = RequestFactory()
+ # Init project with owner in taskflow
+ self.project, self.owner_as = self.make_project_taskflow(
+ 'TestProject', PROJECT_TYPE_PROJECT, self.category, self.user
+ )
+ # Create test collection in iRODS
+ # NOTE: Not fully valid sample paths, needs make_irods_colls() for that
+ self.project_path = self.irods_backend.get_path(self.project)
+ self.irods_path = os.path.join(self.project_path, IRODS_TEMP_COLL)
+ self.irods_coll = self.irods.collections.create(self.irods_path)
+
+
+class TestIrodsStatisticsAjaxView(IrodsbackendViewTestBase):
+ """Tests for the landing zone collection statistics Ajax view"""
+
+ def setUp(self):
+ super().setUp()
+ self.post_url = self.irods_backend.get_url(
+ view='stats', project=self.project, method='POST'
+ )
+
+ def test_get_empty_coll(self):
+ """Test GET request for stats on empty iRODS collection"""
+ with self.login(self.user):
+ response = self.client.get(
+ self.irods_backend.get_url(
+ view='stats', project=self.project, path=self.irods_path
+ )
+ )
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(response.data['file_count'], 0)
+ self.assertEqual(response.data['total_size'], 0)
+
+ def test_get_invalid_coll(self):
+ """Test GET request with invalid collection (should fail)"""
+ with self.login(self.user):
+ response = self.client.get(
+ self.irods_backend.get_url(
+ view='stats', project=self.project, path=self.irods_path
+ )
+ + '%2F..'
+ )
+ self.assertEqual(response.status_code, 400)
+
+ def test_get_coll_obj(self):
+ """Test GET for stats on collection with data object"""
+ # Put data object in iRODS
+ obj_path = self.irods_path + '/' + IRODS_OBJ_NAME
+ make_object(self.irods, obj_path, IRODS_OBJ_CONTENT)
+ with self.login(self.user):
+ response = self.client.get(
+ self.irods_backend.get_url(
+ view='stats', project=self.project, path=self.irods_path
+ )
+ )
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(response.data['file_count'], 1)
+ self.assertEqual(response.data['total_size'], IRODS_OBJ_SIZE)
+
+ def test_get_coll_md5(self):
+ """Test GET for stats on collection with data object and md5"""
+ # Put data object in iRODS
+ obj_path = self.irods_path + '/' + IRODS_OBJ_NAME
+ make_object(self.irods, obj_path, IRODS_OBJ_CONTENT)
+ # Put MD5 data object in iRODS
+ md5_path = self.irods_path + '/' + IRODS_MD5_NAME
+ make_object(self.irods, md5_path, IRODS_OBJ_CONTENT) # Not actual md5
+ with self.login(self.user):
+ response = self.client.get(
+ self.irods_backend.get_url(
+ view='stats', project=self.project, path=self.irods_path
+ )
+ )
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(response.data['file_count'], 1) # md5 not counted
+ self.assertEqual(response.data['total_size'], IRODS_OBJ_SIZE)
+
+ def test_get_coll_not_found(self):
+ """Test GET for stats on non-existing collection"""
+ fail_path = self.irods_path + '/' + IRODS_FAIL_COLL
+ self.assertEqual(self.irods.collections.exists(fail_path), False)
+ with self.login(self.user):
+ response = self.client.get(
+ self.irods_backend.get_url(
+ view='stats', project=self.project, path=fail_path
+ )
+ )
+ self.assertEqual(response.status_code, 404)
+
+ def test_get_coll_not_in_project(self):
+ """Test GET for stats on collection not belonging to project"""
+ self.assertEqual(
+ self.irods.collections.exists(IRODS_NON_PROJECT_PATH), True
+ )
+ with self.login(self.user):
+ response = self.client.get(
+ self.irods_backend.get_url(
+ view='stats',
+ project=self.project,
+ path=IRODS_NON_PROJECT_PATH,
+ )
+ )
+ self.assertEqual(response.status_code, 400)
+
+ def test_get_no_access(self):
+ """Test GET for stats with no access for iRODS collection"""
+ new_user = self.make_user('new_user')
+ self.make_assignment(
+ self.project, new_user, self.role_contributor
+ ) # No taskflow
+ with self.login(new_user):
+ response = self.client.get(
+ self.irods_backend.get_url(
+ view='stats', project=self.project, path=self.irods_path
+ )
+ )
+ self.assertEqual(response.status_code, 403)
+
+ def test_post_empty_coll(self):
+ """Test POST on empty iRODS collection"""
+ post_data = {'paths': [self.irods_path]}
+ with self.login(self.user):
+ response = self.client.post(self.post_url, post_data)
+ self.assertEqual(response.status_code, 200)
+ response_data = response.data['irods_stats']
+ self.assertEqual(len(response_data.values()), 1)
+ expected = {
+ self.irods_path: {'status': 200, 'file_count': 0, 'total_size': 0}
+ }
+ self.assertEqual(response_data, expected)
+
+ def test_post_non_empty_coll(self):
+ """Test POST with data object in collection"""
+ obj_path = os.path.join(self.irods_path, IRODS_OBJ_NAME)
+ make_object(self.irods, obj_path, IRODS_OBJ_CONTENT)
+ post_data = {'paths': [self.irods_path]}
+ with self.login(self.user):
+ response = self.client.post(self.post_url, post_data)
+ expected = {
+ self.irods_path: {
+ 'status': 200,
+ 'file_count': 1,
+ 'total_size': IRODS_OBJ_SIZE,
+ }
+ }
+ self.assertEqual(response.data['irods_stats'], expected)
+
+ def test_post_md5_file(self):
+ """Test POST with .md5 file in collection"""
+ obj_path = os.path.join(self.irods_path, IRODS_OBJ_NAME)
+ obj = make_object(self.irods, obj_path, IRODS_OBJ_CONTENT)
+ self.make_irods_md5_object(obj)
+ self.assert_irods_obj(
+ os.path.join(self.irods_path, IRODS_OBJ_NAME + '.md5')
+ )
+ post_data = {'paths': [self.irods_path]}
+ with self.login(self.user):
+ response = self.client.post(self.post_url, post_data)
+ response_data = response.data['irods_stats']
+ self.assertEqual(len(response_data.values()), 1) # md5 not included
+ expected = {
+ self.irods_path: {
+ 'status': 200,
+ 'file_count': 1,
+ 'total_size': IRODS_OBJ_SIZE, # md5 file size not included
+ }
+ }
+ self.assertEqual(response_data, expected)
+
+ def test_post_multiple_paths(self):
+ """Test POST with multiple paths"""
+ irods_path_new = os.path.join(self.project_path, IRODS_TEMP_COLL2)
+ self.irods.collections.create(irods_path_new)
+ obj_path = os.path.join(self.irods_path, IRODS_OBJ_NAME)
+ make_object(self.irods, obj_path, IRODS_OBJ_CONTENT)
+ post_data = {'paths': [self.irods_path, irods_path_new]}
+ with self.login(self.user):
+ response = self.client.post(self.post_url, post_data)
+ expected = {
+ self.irods_path: {
+ 'status': 200,
+ 'file_count': 1,
+ 'total_size': IRODS_OBJ_SIZE,
+ },
+ irods_path_new: {'status': 200, 'file_count': 0, 'total_size': 0},
+ }
+ self.assertEqual(response.data['irods_stats'], expected)
+
+ def test_post_dupe_path(self):
+ """Test POST with duplicate path"""
+ # This should not happen in the UI, but testing in case of view abuse
+ post_data = {'paths': [self.irods_path, self.irods_path]}
+ with self.login(self.user):
+ response = self.client.post(self.post_url, post_data)
+ # We should only get one result
+ self.assertEqual(len(response.data['irods_stats'].values()), 1)
+
+ def test_post_coll_not_found(self):
+ """Test POST for stats on non-existing collections"""
+ fail_path = os.path.join(self.irods_path, IRODS_FAIL_COLL)
+ self.assertEqual(self.irods.collections.exists(fail_path), False)
+ post_data = {'paths': [fail_path]}
+ with self.login(self.user):
+ response = self.client.post(self.post_url, post_data)
+ self.assertEqual(response.status_code, 200)
+ expected = {fail_path: {'status': 404}}
+ self.assertEqual(response.data['irods_stats'], expected)
+
+ def test_post_non_project_path(self):
+ """Test POST with path not belonging to project"""
+ self.assertEqual(
+ self.irods.collections.exists(IRODS_NON_PROJECT_PATH), True
+ )
+ post_data = {'paths': [IRODS_NON_PROJECT_PATH]}
+ with self.login(self.user):
+ response = self.client.post(self.post_url, post_data)
+ self.assertEqual(response.status_code, 200)
+ expected = {IRODS_NON_PROJECT_PATH: {'status': 400}}
+ self.assertEqual(response.data['irods_stats'], expected)
+
+
+class TestIrodsObjectListAjaxView(IrodsbackendViewTestBase):
+ """Tests for IrodsObjectListAjaxView"""
+
+ def test_get_empty_coll(self):
+ """Test IrodsObjectListAjaxView GET with empty collection"""
+ with self.login(self.user):
+ response = self.client.get(
+ self.irods_backend.get_url(
+ view='list',
+ path=self.irods_path,
+ project=self.project,
+ md5=0,
+ )
+ )
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(len(response.data['irods_data']), 0)
+
+ def test_get_data_obj(self):
+ """Test GET with data object"""
+ # Put data object in iRODS
+ obj_path = self.irods_path + '/' + IRODS_OBJ_NAME
+ data_obj = make_object(self.irods, obj_path, IRODS_OBJ_CONTENT)
+
+ with self.login(self.user):
+ response = self.client.get(
+ self.irods_backend.get_url(
+ view='list',
+ path=self.irods_path,
+ project=self.project,
+ md5=0,
+ )
+ )
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(len(response.data['irods_data']), 1)
+
+ list_obj = response.data['irods_data'][0]
+ self.assertNotIn('md5_file', list_obj)
+ self.assertEqual(data_obj.name, list_obj['name'])
+ self.assertEqual(data_obj.path, list_obj['path'])
+ self.assertEqual(data_obj.size, IRODS_OBJ_SIZE)
+
+ def test_get_md5_file(self):
+ """Test GET with data object and md5 file"""
+ obj_path = self.irods_path + '/' + IRODS_OBJ_NAME
+ obj = make_object(self.irods, obj_path, IRODS_OBJ_CONTENT)
+ # Create MD5 data object in iRODS
+ self.make_irods_md5_object(obj)
+
+ with self.login(self.user):
+ response = self.client.get(
+ self.irods_backend.get_url(
+ view='list',
+ path=self.irods_path,
+ project=self.project,
+ md5=1,
+ )
+ )
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(len(response.data['irods_data']), 1) # Still 1
+ self.assertEqual(response.data['irods_data'][0]['md5_file'], True)
+
+ def test_get_md5_no_file(self):
+ """Test GET with md5 set True and no md5 file"""
+ obj_path = self.irods_path + '/' + IRODS_OBJ_NAME
+ make_object(self.irods, obj_path, IRODS_OBJ_CONTENT)
+
+ with self.login(self.user):
+ response = self.client.get(
+ self.irods_backend.get_url(
+ view='list',
+ path=self.irods_path,
+ project=self.project,
+ md5=1,
+ )
+ )
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(len(response.data['irods_data']), 1)
+ self.assertEqual(response.data['irods_data'][0]['md5_file'], False)
+
+ def test_get_coll_not_found(self):
+ """Test GET with non-existing collection"""
+ fail_path = self.irods_path + '/' + IRODS_FAIL_COLL
+ self.assertEqual(self.irods.collections.exists(fail_path), False)
+ with self.login(self.user):
+ response = self.client.get(
+ self.irods_backend.get_url(
+ view='list', path=fail_path, project=self.project, md5=0
+ )
+ )
+ self.assertEqual(response.status_code, 404)
+
+ def test_get_coll_not_in_project(self):
+ """Test GET with collection not in project"""
+ self.assertEqual(
+ self.irods.collections.exists(IRODS_NON_PROJECT_PATH), True
+ )
+ with self.login(self.user):
+ response = self.client.get(
+ self.irods_backend.get_url(
+ view='list',
+ path=IRODS_NON_PROJECT_PATH,
+ project=self.project,
+ md5=0,
+ )
+ )
+ self.assertEqual(response.status_code, 400)
+
+ def test_get_no_access(self):
+ """Test GET with no access to collection"""
+ new_user = self.make_user('new_user')
+ self.make_assignment(
+ self.project, new_user, self.role_contributor
+ ) # No taskflow
+ with self.login(new_user):
+ response = self.client.get(
+ self.irods_backend.get_url(
+ view='list',
+ path=self.irods_path,
+ project=self.project,
+ md5=0,
+ )
+ )
+ self.assertEqual(response.status_code, 403)
diff --git a/irodsbackend/views.py b/irodsbackend/views.py
index a6bd5e5ca..c2df8c3c6 100644
--- a/irodsbackend/views.py
+++ b/irodsbackend/views.py
@@ -148,40 +148,31 @@ def get(self, *args, **kwargs):
return Response(self._get_detail(ex), status=500)
def post(self, request, *args, **kwargs):
- data = {'coll_objects': []}
- q_dict = request.POST
+ ret = {}
project_path = self.irods_backend.get_path(self.project)
try:
irods = self.irods_backend.get_session_obj()
except Exception as ex:
return JsonResponse(self._get_detail(ex), status=500)
- for path in q_dict.getlist('paths'):
- if project_path not in path:
- data['coll_objects'].append(
- {'path': path, 'status': '400', 'stats': {}}
- )
- break
- if not self._check_collection_perm(path, request.user, irods):
- data['coll_objects'].append(
- {'path': path, 'status': '403', 'stats': {}}
- )
- break
- try:
- if not irods.collections.exists(path):
- data['coll_objects'].append(
- {'path': path, 'status': '404', 'stats': {}}
- )
- else:
- ret_data = self.irods_backend.get_object_stats(irods, path)
- data['coll_objects'].append(
- {'path': path, 'status': '200', 'stats': ret_data}
- )
- except Exception:
- data['coll_objects'].append(
- {'path': path, 'status': '500', 'stats': {}}
- )
+ for p in request.POST.getlist('paths'):
+ d = {}
+ if not p.startswith(project_path):
+ d['status'] = 400
+ elif not self._check_collection_perm(p, request.user, irods):
+ d['status'] = 403
+ else:
+ try:
+ if irods.collections.exists(p):
+ stats = self.irods_backend.get_object_stats(irods, p)
+ d.update(stats)
+ d['status'] = 200
+ else:
+ d['status'] = 404
+ except Exception:
+ d['status'] = 500
+ ret[p] = d
irods.cleanup()
- return Response(data, status=200)
+ return Response({'irods_stats': ret}, status=200)
class IrodsObjectListAjaxView(BaseIrodsAjaxView):
@@ -190,18 +181,31 @@ class IrodsObjectListAjaxView(BaseIrodsAjaxView):
permission_required = 'irodsbackend.view_files'
def get(self, request, *args, **kwargs):
- md5 = request.GET.get('md5')
- colls = request.GET.get('colls')
+ check_md5 = bool(int(request.GET.get('md5')))
+ include_colls = bool(int(request.GET.get('colls')))
# Get files
try:
with self.irods_backend.get_session() as irods:
- ret_data = self.irods_backend.get_objects(
+ objs = self.irods_backend.get_objects(
irods,
self.path,
- check_md5=bool(int(md5)),
- include_colls=bool(int(colls)),
+ include_md5=check_md5,
+ include_colls=include_colls,
)
- return Response(ret_data, status=200)
+ ret = []
+ md5_paths = []
+ if check_md5:
+ md5_paths = [
+ o['path'] for o in objs if o['path'].endswith('.md5')
+ ]
+ for o in objs:
+ if o['type'] == 'coll' and include_colls:
+ ret.append(o)
+ elif o['type'] == 'obj' and not o['path'].endswith('.md5'):
+ if check_md5:
+ o['md5_file'] = o['path'] + '.md5' in md5_paths
+ ret.append(o)
+ return Response({'irods_data': ret}, status=200)
except Exception as ex:
return Response(self._get_detail(ex), status=500)
diff --git a/landingzones/plugins.py b/landingzones/plugins.py
index ce8a09e85..9a5d78be5 100644
--- a/landingzones/plugins.py
+++ b/landingzones/plugins.py
@@ -19,6 +19,8 @@
from landingzones.constants import (
STATUS_ALLOW_UPDATE,
+ STATUS_BUSY,
+ STATUS_FINISHED,
ZONE_STATUS_MOVED,
ZONE_STATUS_DELETED,
)
@@ -148,6 +150,43 @@ def get_object_link(self, model_str, uuid):
'label': obj.get_display_name(),
}
+ def get_statistics(self):
+ """
+ Return app statistics as a dict. Should take the form of
+ {id: {label, value, url (optional), description (optional)}}.
+
+ :return: Dict
+ """
+ return {
+ 'zones_total': {
+ 'label': 'Total zones',
+ 'value': LandingZone.objects.count(),
+ },
+ 'zones_active': {
+ 'label': 'Active zones',
+ 'value': LandingZone.objects.filter(
+ status__in=STATUS_ALLOW_UPDATE
+ ).count(),
+ 'description': 'Landing zones available for use (active or '
+ 'failed)',
+ },
+ 'zones_finished': {
+ 'label': 'Finished zones',
+ 'value': LandingZone.objects.filter(
+ status__in=STATUS_FINISHED
+ ).count(),
+ 'description': 'Landing zones finished successfully, deleted '
+ 'or not created',
+ },
+ 'zones_busy': {
+ 'label': 'Busy zones',
+ 'value': LandingZone.objects.filter(
+ status__in=STATUS_BUSY
+ ).count(),
+ 'description': 'Landing zones with an ongoing transaction',
+ },
+ }
+
def get_project_list_value(self, column_id, project, user):
"""
Return a value for the optional additional project list column specific
diff --git a/landingzones/static/landingzones/js/landingzones.js b/landingzones/static/landingzones/js/landingzones.js
index 7cb82c82f..baa57463f 100644
--- a/landingzones/static/landingzones/js/landingzones.js
+++ b/landingzones/static/landingzones/js/landingzones.js
@@ -20,12 +20,10 @@ var updateZoneStatus = function() {
// Make the POST request to retrieve zone statuses
if (zoneUuids.length > 0) {
$.ajax({
- url: zoneStatusUrl,
+ url: zoneStatusURL,
method: 'POST',
dataType: 'JSON',
- data: {
- zone_uuids: zoneUuids
- }
+ data: {zone_uuids: zoneUuids}
}).done(function(data) {
$('.sodar-lz-zone-tr-existing').each(function() {
var zoneUuid = $(this).attr('data-zone-uuid');
@@ -62,15 +60,12 @@ var updateZoneStatus = function() {
}
if (['CREATING', 'NOT CREATED', 'MOVED', 'DELETED'].includes(zoneStatus)) {
zoneTr.find('p#sodar-lz-zone-stats-container-' + zoneUuid).hide();
-
if (zoneStatus === 'MOVED') {
var statusMovedSpan = zoneTr.find(
'span#sodar-lz-zone-status-moved-' + zoneUuid
);
statusMovedSpan.html(
- '' +
+ '' +
' ' +
'Browse files in sample sheet
'
);
@@ -78,7 +73,6 @@ var updateZoneStatus = function() {
}
// Button modification
- // if (zoneStatus !== 'ACTIVE' && zoneStatus !== 'FAILED' && isSuperuser) {}
if (zoneStatus !== 'ACTIVE' && zoneStatus !== 'FAILED' && !isSuperuser) {
zoneTr.find('td.sodar-lz-zone-title').addClass('text-muted');
zoneTr.find('td.sodar-lz-zone-assay').addClass('text-muted');
@@ -96,7 +90,9 @@ var updateZoneStatus = function() {
zoneTr.find('td.sodar-lz-zone-title').removeClass('text-muted');
zoneTr.find('td.sodar-lz-zone-assay').removeClass('text-muted');
zoneTr.find('td.sodar-lz-zone-status-info').removeClass('text-muted');
- zoneTr.find('p#sodar-lz-zone-stats-container-' + zoneUuid).show();
+ if (zoneStatus !== 'DELETED') {
+ zoneTr.find('p#sodar-lz-zone-stats-container-' + zoneUuid).show();
+ }
zoneTr.find('.btn').each(function() {
if ($(this).is('button')) {
$(this).removeAttr('disabled');
diff --git a/landingzones/tasks_taskflow.py b/landingzones/tasks_taskflow.py
index 719117968..875fee5b7 100644
--- a/landingzones/tasks_taskflow.py
+++ b/landingzones/tasks_taskflow.py
@@ -19,6 +19,7 @@
from landingzones.constants import (
STATUS_BUSY,
+ STATUS_FINISHED,
ZONE_STATUS_FAILED,
ZONE_STATUS_NOT_CREATED,
ZONE_STATUS_MOVED,
@@ -381,10 +382,13 @@ def execute(
*args,
**kwargs
):
- self.set_status(
- landing_zone, flow_name, status, status_info, extra_data
- )
- self.data_modified = True
+ # Prevent setting status if already finished (see #1909)
+ landing_zone.refresh_from_db()
+ if landing_zone.status not in STATUS_FINISHED:
+ self.set_status(
+ landing_zone, flow_name, status, status_info, extra_data
+ )
+ self.data_modified = True
super().execute(*args, **kwargs)
def revert(
diff --git a/landingzones/templates/landingzones/_details_card.html b/landingzones/templates/landingzones/_details_card.html
index c53dd9ca2..99d2e95a0 100644
--- a/landingzones/templates/landingzones/_details_card.html
+++ b/landingzones/templates/landingzones/_details_card.html
@@ -17,16 +17,13 @@
.sodar-lz-detail-table td:first-child {
border-left: 0;
}
-
.sodar-lz-detail-table thead tr th:nth-child(1) {
width: 250px;
}
-
.sodar-lz-detail-table thead tr th:nth-child(3) {
white-space: nowrap;
width: 150px;
}
-
/* Responsive modifications */
@media screen and (max-width: 1000px) {
.table.sodar-lz-detail-table tr th:nth-child(2) {
@@ -41,6 +38,8 @@
@@ -61,7 +60,7 @@
{% include 'landingzones/_zone_item.html' with item=zone details_card_mode=True %}
{% endfor %}
{% else %}
-
+
No active landing zones found.
|
diff --git a/landingzones/templates/landingzones/project_zones.html b/landingzones/templates/landingzones/project_zones.html
index 833864e3c..6a0110f5a 100644
--- a/landingzones/templates/landingzones/project_zones.html
+++ b/landingzones/templates/landingzones/project_zones.html
@@ -56,13 +56,11 @@
display: none;
}
}
-
@media screen and (max-width: 1200px) {
.table.sodar-lz-table thead tr th:nth-child(1) {
width: 250px;
}
}
-
@media screen and (max-width: 900px) {
.table.sodar-lz-table thead tr th:nth-child(2) {
display: none;
@@ -71,7 +69,6 @@
display: none;
}
}
-
@media screen and (max-width: 450px) {
.table.sodar-lz-table thead tr th:nth-child(4) {
display: none;
@@ -157,8 +154,7 @@ Landing Zones
window.zoneStatusUpdated = false;
window.statusInterval = {{ zone_status_interval }} * 1000;
window.irodsShowChecksumCol = true;
- var zoneStatusUrl = "{% url 'landingzones:ajax_status' project=project.sodar_uuid %}";
-
+ var zoneStatusURL = "{% url 'landingzones:ajax_status' project=project.sodar_uuid %}";
var currentUserURL = "{% url 'projectroles:ajax_user_current' %}";
diff --git a/landingzones/tests/test_plugins.py b/landingzones/tests/test_plugins.py
new file mode 100644
index 000000000..2af97f2a6
--- /dev/null
+++ b/landingzones/tests/test_plugins.py
@@ -0,0 +1,138 @@
+"""Tests for plugins in the landingzones app"""
+
+from test_plus.test import TestCase
+
+# Projectroles dependency
+from projectroles.models import SODAR_CONSTANTS
+from projectroles.plugins import ProjectAppPluginPoint
+from projectroles.tests.test_models import (
+ ProjectMixin,
+ RoleMixin,
+ RoleAssignmentMixin,
+)
+
+# Samplesheets dependency
+from samplesheets.tests.test_io import SampleSheetIOMixin, SHEET_DIR
+
+import landingzones.constants as lc
+from landingzones.tests.test_models import LandingZoneMixin
+
+
+# Local constants
+SHEET_PATH_SMALL2 = SHEET_DIR + 'i_small2.zip'
+
+
+class LandingzonesPluginTestBase(
+ ProjectMixin,
+ RoleMixin,
+ RoleAssignmentMixin,
+ SampleSheetIOMixin,
+ LandingZoneMixin,
+ TestCase,
+):
+ """Base class for landingzones plugin tests"""
+
+ def setUp(self):
+ # Init roles
+ self.init_roles()
+ # Make owner user
+ self.user_owner = self.make_user('owner')
+ # Init project and assignment
+ self.project = self.make_project(
+ 'TestProject', SODAR_CONSTANTS['PROJECT_TYPE_PROJECT'], None
+ )
+ self.owner_as = self.make_assignment(
+ self.project, self.user_owner, self.role_owner
+ )
+ # Import investigation
+ self.investigation = self.import_isa_from_file(
+ SHEET_PATH_SMALL2, self.project
+ )
+ self.investigation.irods_status = True
+ self.investigation.save()
+ self.study = self.investigation.studies.first()
+ self.assay = self.study.assays.first()
+ # Get plugin
+ self.plugin = ProjectAppPluginPoint.get_plugin('landingzones')
+
+
+class TestGetStatistics(LandingzonesPluginTestBase):
+ """Tests for get_statistics()"""
+
+ def _make_zone(self, status):
+ self.make_landing_zone(
+ 'zone_{}'.format(status.lower()),
+ self.project,
+ self.user_owner,
+ self.assay,
+ status=status,
+ )
+
+ def _assert_stats(self, expected):
+ stats = self.plugin.get_statistics()
+ for k, v in expected.items():
+ self.assertEqual(stats[k]['value'], v)
+
+ def test_get_statistics_active(self):
+ """Test get_statistics() with active zones"""
+ self._make_zone(lc.ZONE_STATUS_ACTIVE)
+ self._make_zone(lc.ZONE_STATUS_FAILED)
+ self._assert_stats(
+ {
+ 'zones_total': 2,
+ 'zones_active': 2,
+ 'zones_finished': 0,
+ 'zones_busy': 0,
+ }
+ )
+
+ def test_get_statistics_finished(self):
+ """Test get_statistics() with finished zones"""
+ self._make_zone(lc.ZONE_STATUS_MOVED)
+ self._make_zone(lc.ZONE_STATUS_DELETED)
+ self._assert_stats(
+ {
+ 'zones_total': 2,
+ 'zones_active': 0,
+ 'zones_finished': 2,
+ 'zones_busy': 0,
+ }
+ )
+
+ def test_get_statistics_busy(self):
+ """Test get_statistics() with busy zones"""
+ self._make_zone(lc.ZONE_STATUS_MOVING)
+ self._make_zone(lc.ZONE_STATUS_DELETING)
+ self._assert_stats(
+ {
+ 'zones_total': 2,
+ 'zones_active': 0,
+ 'zones_finished': 0,
+ 'zones_busy': 2,
+ }
+ )
+
+ def test_get_statistics_mixed(self):
+ """Test get_statistics() with mixed zone statuses"""
+ self._make_zone(lc.ZONE_STATUS_ACTIVE)
+ self._make_zone(lc.ZONE_STATUS_MOVING)
+ self._make_zone(lc.ZONE_STATUS_DELETED)
+ self._assert_stats(
+ {
+ 'zones_total': 3,
+ 'zones_active': 1,
+ 'zones_finished': 1,
+ 'zones_busy': 1,
+ }
+ )
+
+ def test_get_statistics_no_zones(self):
+ """Test get_statistics() with no zones"""
+ self._assert_stats(
+ {
+ 'zones_total': 0,
+ 'zones_active': 0,
+ 'zones_finished': 0,
+ 'zones_busy': 0,
+ }
+ )
diff --git a/landingzones/tests/test_ui.py b/landingzones/tests/test_ui.py
index 63b02fe3a..637cf1be2 100644
--- a/landingzones/tests/test_ui.py
+++ b/landingzones/tests/test_ui.py
@@ -16,7 +16,12 @@
from samplesheets.tests.test_io import SampleSheetIOMixin, SHEET_DIR
from samplesheets.tests.test_sheet_config import SheetConfigMixin
-from landingzones.constants import ZONE_STATUS_CREATING
+from landingzones.constants import (
+ ZONE_STATUS_CREATING,
+ ZONE_STATUS_ACTIVE,
+ ZONE_STATUS_VALIDATING,
+ ZONE_STATUS_DELETED,
+)
from landingzones.tests.test_models import LandingZoneMixin
@@ -27,10 +32,10 @@
SHEET_PATH = SHEET_DIR + 'i_small.zip'
-class TestProjectZoneView(
+class LandingZoneUITestBase(
SampleSheetIOMixin, SheetConfigMixin, LandingZoneMixin, TestUIBase
):
- """ProjectZoneView UI tests"""
+ """Base class for landingzones UI tests"""
investigation = None
study = None
@@ -57,6 +62,14 @@ def _assert_btn_enabled(self, element, expected=True):
else:
self.assertIn('disabled', element.get_attribute('class'))
+ def _wait_for_status(self, status_elem, status):
+ """Wait for a specific status in the zone status element"""
+ for i in range(0, 25):
+ if status_elem.text == status:
+ return
+ time.sleep(1.0)
+ raise Exception('Status not changed')
+
def _wait_for_status_update(self):
"""Wait for JQuery landing zone status updates to finish"""
for i in range(0, 20):
@@ -75,6 +88,13 @@ def setUp(self):
self.user_delegate,
self.user_contributor,
]
+
+
+class TestProjectZoneView(LandingZoneUITestBase):
+ """UI tests for ProjectZoneView"""
+
+ def setUp(self):
+ super().setUp()
self.url = reverse(
'landingzones:list',
kwargs={'project': self.project.sodar_uuid},
@@ -285,6 +305,96 @@ def test_render_other_user_no_access(self):
self.assertEqual(len(zones), 2)
self._assert_element(By.CLASS_NAME, 'sodar-lz-zone-warn-access', True)
+ def test_status_update(self):
+ """Test ProjectZoneView with zone status update"""
+ self._setup_investigation()
+ self.investigation.irods_status = True
+ self.investigation.save()
+ contrib_zone = self.make_landing_zone(
+ 'contrib_zone',
+ self.project,
+ self.user_contributor,
+ self.assay,
+ status='ACTIVE',
+ )
+ self.login_and_redirect(self.user_contributor, self.url)
+ zone_status = self.selenium.find_element(
+ By.CLASS_NAME, 'sodar-lz-zone-status'
+ )
+ self.assertEqual(zone_status.text, ZONE_STATUS_ACTIVE)
+ contrib_zone.set_status(ZONE_STATUS_VALIDATING)
+ self._wait_for_status(zone_status, ZONE_STATUS_VALIDATING)
+ self.assertEqual(zone_status.text, ZONE_STATUS_VALIDATING)
+
+ def test_stats_deleted_owner(self):
+ """Test ProjectZoneView stats badge on DELETED zone as owner"""
+ self._setup_investigation()
+ self.investigation.irods_status = True
+ self.investigation.save()
+ zone = self.make_landing_zone(
+ 'contrib_zone',
+ self.project,
+ self.user_contributor,
+ self.assay,
+ status=ZONE_STATUS_ACTIVE,
+ )
+ self.login_and_redirect(self.user_contributor, self.url)
+
+ zone_status = self.selenium.find_element(
+ By.CLASS_NAME, 'sodar-lz-zone-status'
+ )
+ zone_status_info = self.selenium.find_element(
+ By.CLASS_NAME, 'sodar-lz-zone-status-info'
+ )
+ self._wait_for_status(zone_status, ZONE_STATUS_ACTIVE)
+ self.assertTrue(
+ zone_status_info.find_element(
+ By.CLASS_NAME, 'sodar-irods-stats'
+ ).is_displayed()
+ )
+ # Update status to deleted, stats badge should no longer be rendered
+ zone.set_status(ZONE_STATUS_DELETED)
+ self._wait_for_status(zone_status, ZONE_STATUS_DELETED)
+ self.assertFalse(
+ zone_status_info.find_element(
+ By.CLASS_NAME, 'sodar-irods-stats'
+ ).is_displayed()
+ )
+
+ def test_stats_deleted_superuser(self):
+ """Test ProjectZoneView stats badge on DELETED zone as superuser"""
+ self._setup_investigation()
+ self.investigation.irods_status = True
+ self.investigation.save()
+ zone = self.make_landing_zone(
+ 'contrib_zone',
+ self.project,
+ self.user_contributor,
+ self.assay,
+ status=ZONE_STATUS_ACTIVE,
+ )
+ self.login_and_redirect(self.superuser, self.url)
+
+ zone_status = self.selenium.find_element(
+ By.CLASS_NAME, 'sodar-lz-zone-status'
+ )
+ zone_status_info = self.selenium.find_element(
+ By.CLASS_NAME, 'sodar-lz-zone-status-info'
+ )
+ self._wait_for_status(zone_status, ZONE_STATUS_ACTIVE)
+ self.assertTrue(
+ zone_status_info.find_element(
+ By.CLASS_NAME, 'sodar-irods-stats'
+ ).is_displayed()
+ )
+ zone.set_status(ZONE_STATUS_DELETED)
+ self._wait_for_status(zone_status, ZONE_STATUS_DELETED)
+ self.assertFalse(
+ zone_status_info.find_element(
+ By.CLASS_NAME, 'sodar-irods-stats'
+ ).is_displayed()
+ )
+
def test_zone_buttons(self):
"""Test ProjectZoneView zone buttons"""
self._setup_investigation()
@@ -420,3 +530,113 @@ def test_zone_locked_contributor(self):
By.CLASS_NAME, 'sodar-lz-zone-status-info'
).get_attribute('class'),
)
+
+
+class TestProjectDetailView(LandingZoneUITestBase):
+ """UI tests for ProjectDetailView"""
+
+ def setUp(self):
+ super().setUp()
+ self.url = reverse(
+ 'projectroles:detail',
+ kwargs={'project': self.project.sodar_uuid},
+ )
+
+ def test_render_no_zones(self):
+ """Test ProjectDetailView with no zones"""
+ self._setup_investigation()
+ self.investigation.irods_status = True
+ self.investigation.save()
+ self.login_and_redirect(self.user_owner, self.url)
+ self._assert_element(By.ID, 'sodar-lz-detail-table-no-zones', True)
+ self._assert_element(By.CLASS_NAME, 'sodar-lz-zone-tr-existing', False)
+
+ def test_render_own_zone(self):
+ """Test ProjectDetailView as contributor with own zone"""
+ self._setup_investigation()
+ self.investigation.irods_status = True
+ self.investigation.save()
+ contrib_zone = self.make_landing_zone(
+ 'contrib_zone', self.project, self.user_contributor, self.assay
+ )
+ self.login_and_redirect(self.user_contributor, self.url)
+ self._assert_element(By.ID, 'sodar-lz-detail-table-no-zones', False)
+ zones = self.selenium.find_elements(
+ By.CLASS_NAME, 'sodar-lz-zone-tr-existing'
+ )
+ self.assertEqual(len(zones), 1)
+ self.assertEqual(
+ zones[0].get_attribute('data-zone-uuid'),
+ str(contrib_zone.sodar_uuid),
+ )
+
+ def test_render_multiple_zones(self):
+ """Test ProjectDetailView as contributor with multiple own zones"""
+ self._setup_investigation()
+ self.investigation.irods_status = True
+ self.investigation.save()
+ self.make_landing_zone(
+ 'contrib_zone', self.project, self.user_contributor, self.assay
+ )
+ self.make_landing_zone(
+ 'contrib_zone2', self.project, self.user_contributor, self.assay
+ )
+ self.login_and_redirect(self.user_contributor, self.url)
+ zones = self.selenium.find_elements(
+ By.CLASS_NAME, 'sodar-lz-zone-tr-existing'
+ )
+ self.assertEqual(len(zones), 2)
+
+ def test_render_other_zone(self):
+ """Test ProjectDetailView as contributor with other user's zone"""
+ self._setup_investigation()
+ self.investigation.irods_status = True
+ self.investigation.save()
+ self.make_landing_zone(
+ 'owner_zone', self.project, self.user_owner, self.assay
+ )
+ self.login_and_redirect(self.user_contributor, self.url)
+ self._assert_element(By.ID, 'sodar-lz-detail-table-no-zones', True)
+ self._assert_element(By.CLASS_NAME, 'sodar-lz-zone-tr-existing', False)
+
+ def test_render_as_owner(self):
+ """Test ProjectDetailView as owner with own and other zones"""
+ self._setup_investigation()
+ self.investigation.irods_status = True
+ self.investigation.save()
+ owner_zone = self.make_landing_zone(
+ 'owner_zone', self.project, self.user_owner, self.assay
+ )
+ self.make_landing_zone(
+ 'contrib_zone', self.project, self.user_contributor, self.assay
+ )
+ self.login_and_redirect(self.user_owner, self.url)
+ zones = self.selenium.find_elements(
+ By.CLASS_NAME, 'sodar-lz-zone-tr-existing'
+ )
+ self.assertEqual(len(zones), 1) # Only own zone should be visible
+ self.assertEqual(
+ zones[0].get_attribute('data-zone-uuid'),
+ str(owner_zone.sodar_uuid),
+ )
+
+ def test_update_status(self):
+ """Test ProjectDetailView with zone status update"""
+ self._setup_investigation()
+ self.investigation.irods_status = True
+ self.investigation.save()
+ contrib_zone = self.make_landing_zone(
+ 'contrib_zone',
+ self.project,
+ self.user_contributor,
+ self.assay,
+ status='ACTIVE',
+ )
+ self.login_and_redirect(self.user_contributor, self.url)
+ zone_status = self.selenium.find_element(
+ By.CLASS_NAME, 'sodar-lz-zone-status'
+ )
+ self.assertEqual(zone_status.text, ZONE_STATUS_ACTIVE)
+ contrib_zone.set_status(ZONE_STATUS_VALIDATING)
+ self._wait_for_status(zone_status, ZONE_STATUS_VALIDATING)
+ self.assertEqual(zone_status.text, ZONE_STATUS_VALIDATING)
diff --git a/landingzones/tests/test_views_taskflow.py b/landingzones/tests/test_views_taskflow.py
index 14562bdcf..0826c638a 100644
--- a/landingzones/tests/test_views_taskflow.py
+++ b/landingzones/tests/test_views_taskflow.py
@@ -67,6 +67,8 @@
ZONE_BASE_COLLS = [MISC_FILES_COLL, RESULTS_COLL, TRACK_HUBS_COLL]
ZONE_PLUGIN_COLLS = ['0815-N1-DNA1', '0815-T1-DNA1']
ZONE_ALL_COLLS = ZONE_BASE_COLLS + ZONE_PLUGIN_COLLS
+RAW_DATA_COLL = 'RawData'
+MAX_QUANT_COLL = 'MaxQuantResults'
class LandingZoneTaskflowMixin:
@@ -239,8 +241,7 @@ def test_create_zone_colls(self):
'configuration': '',
}
with self.login(self.user):
- response = self.client.post(self.url, values)
- self.assertRedirects(response, self.redirect_url)
+ self.client.post(self.url, values)
self.assert_zone_count(1)
zone = LandingZone.objects.first()
@@ -287,8 +288,7 @@ def test_create_zone_colls_plugin(self):
'configuration': '',
}
with self.login(self.user):
- response = self.client.post(self.url, values)
- self.assertRedirects(response, self.redirect_url)
+ self.client.post(self.url, values)
self.assert_zone_count(1)
zone = LandingZone.objects.first()
@@ -304,6 +304,38 @@ def test_create_zone_colls_plugin(self):
self.assert_irods_access(
self.user.username, os.path.join(zone_path, c), IRODS_ACCESS_OWN
)
+ # These should not be created for this plugin
+ for c in [MAX_QUANT_COLL, RAW_DATA_COLL]:
+ self.assert_irods_coll(zone, c, False)
+
+ def test_create_zone_colls_plugin_shortcuts(self):
+        """Test landing zone creation with shortcut collections in plugin"""
+ self.assertEqual(LandingZone.objects.count(), 0)
+ # Set pep_ms plugin
+ self.assay.measurement_type = {'name': 'protein expression profiling'}
+ self.assay.technology_type = {'name': 'mass spectrometry'}
+ self.assay.save()
+ # NOTE: update_cache() not implemented in this plugin
+
+ values = {
+ 'assay': str(self.assay.sodar_uuid),
+ 'title_suffix': ZONE_SUFFIX,
+ 'description': ZONE_DESC,
+ 'create_colls': True,
+ 'restrict_colls': False,
+ 'configuration': '',
+ }
+ with self.login(self.user):
+ self.client.post(self.url, values)
+
+ self.assert_zone_count(1)
+ zone = LandingZone.objects.first()
+ self.assert_zone_status(zone, ZONE_STATUS_ACTIVE)
+ zone_colls = ZONE_BASE_COLLS + [RAW_DATA_COLL, MAX_QUANT_COLL]
+ for c in zone_colls:
+ self.assert_irods_coll(zone, c, True)
+ for c in ZONE_PLUGIN_COLLS:
+ self.assert_irods_coll(zone, c, False)
# TODO: Test without sodarcache (see issue #1157)
@@ -332,8 +364,7 @@ def test_create_zone_colls_plugin_restrict(self):
'configuration': '',
}
with self.login(self.user):
- response = self.client.post(self.url, values)
- self.assertRedirects(response, self.redirect_url)
+ self.client.post(self.url, values)
self.assert_zone_count(1)
zone = LandingZone.objects.first()
diff --git a/landingzones/views.py b/landingzones/views.py
index 0421f19ca..1308d69c1 100644
--- a/landingzones/views.py
+++ b/landingzones/views.py
@@ -32,7 +32,6 @@
STATUS_ALLOW_UPDATE,
STATUS_FINISHED,
STATUS_INFO_DELETE_NO_COLL,
- ZONE_STATUS_OK,
ZONE_STATUS_DELETED,
)
from landingzones.forms import LandingZoneForm
@@ -174,9 +173,7 @@ def submit_create(
)
tl_event.add_object(obj=zone, label='zone', name=zone.title)
tl_event.add_object(
- obj=zone.user,
- label='user',
- name=zone.user.username,
+ obj=zone.user, label='user', name=zone.user.username
)
tl_event.add_object(
obj=zone.assay, label='assay', name=zone.assay.get_name()
@@ -187,6 +184,7 @@ def submit_create(
colls = []
if create_colls:
logger.debug('Creating default landing zone collections..')
+ assay_path = irods_backend.get_path(zone.assay)
colls = [RESULTS_COLL, MISC_FILES_COLL, TRACK_HUBS_COLL]
plugin = zone.assay.get_plugin()
# First try the cache
@@ -199,14 +197,24 @@ def submit_create(
project=zone.project,
)
if cache_obj and cache_obj.data:
- assay_path = irods_backend.get_path(zone.assay)
colls += [
p.replace(assay_path + '/', '')
for p in cache_obj.data['paths'].keys()
]
logger.debug('Retrieved collections from cache')
- elif plugin:
- pass # TODO: Build tables, get rows directly from plugin?
+ # TODO: If no cache, build tables and get rows directly from plugin?
+ # Add shortcut paths
+ if plugin:
+ shortcuts = plugin.get_shortcuts(zone.assay) or []
+ for s in shortcuts:
+ path = s.get('path')
+ if path:
+ path = path.replace(assay_path + '/', '')
+ if path and path not in colls:
+ colls.append(path)
+ logger.debug(
+ 'Added shorctut collection "{}"'.format(s.get('id'))
+ )
logger.debug('Collections to be created: {}'.format(', '.join(colls)))
flow_name = 'landing_zone_create'
@@ -257,15 +265,11 @@ def update_zone(self, zone, request=None):
user=user,
event_name='zone_update',
description=description,
- status_type=ZONE_STATUS_OK,
+ status_type='OK',
extra_data=tl_extra,
)
tl_event.add_object(obj=zone, label='zone', name=zone.title)
- tl_event.add_object(
- obj=user,
- label='user',
- name=user.username,
- )
+ tl_event.add_object(obj=user, label='user', name=user.username)
tl_event.add_object(
obj=zone.assay, label='assay', name=zone.assay.get_name()
)
@@ -318,11 +322,7 @@ def submit_delete(self, zone):
if zone_exists: # Submit with taskflow
flow_name = 'landing_zone_delete'
flow_data = self.get_flow_data(
- zone,
- flow_name,
- {
- 'zone_uuid': str(zone.sodar_uuid),
- },
+ zone, flow_name, {'zone_uuid': str(zone.sodar_uuid)}
)
taskflow.submit(
project=project,
diff --git a/landingzones/views_ajax.py b/landingzones/views_ajax.py
index 6e02698fe..2979c1157 100644
--- a/landingzones/views_ajax.py
+++ b/landingzones/views_ajax.py
@@ -24,21 +24,17 @@ def check_zone_permission(self, zone, user):
def post(self, request, *args, **kwargs):
zone_uuids = request.data.getlist('zone_uuids[]')
project = self.get_project()
-
# Filter landing zones based on UUIDs and project
zones = LandingZone.objects.filter(
sodar_uuid__in=zone_uuids, project=project
)
-
status_dict = {}
for zone in zones:
# Check permissions
if not self.check_zone_permission(zone, self.request.user):
continue
-
status_dict[str(zone.sodar_uuid)] = {
'status': zone.status,
'status_info': zone.status_info,
}
-
return Response(status_dict, status=200)
diff --git a/landingzones/views_api.py b/landingzones/views_api.py
index 578364222..3dda7365f 100644
--- a/landingzones/views_api.py
+++ b/landingzones/views_api.py
@@ -280,7 +280,6 @@ def perform_update(self, serializer):
if not self._validate_update_fields(serializer):
# Should raise 400 Bad Request
raise ValidationError('{}Invalid update fields'.format(ex_msg))
-
# If all is OK, go forward with object update and taskflow submission
super().perform_update(serializer)
try:
diff --git a/requirements/base.txt b/requirements/base.txt
index e5b082997..a77640a9c 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -6,7 +6,7 @@ wheel==0.40.0
setuptools==67.6.0
# Django
-django==3.2.23
+django==3.2.25
# Configuration
django-environ==0.10.0
@@ -70,8 +70,8 @@ mistune==2.0.5
django-autocomplete-light==3.9.4
# SODAR Core
-django-sodar-core==0.13.3
-# -e git+https://github.com/bihealth/sodar-core.git@11b666cd03c555d0b95c3de547fe312f93263f29#egg=django-sodar-core
+django-sodar-core==0.13.4
+# -e git+https://github.com/bihealth/sodar-core.git@be012e5536bacf8bfbfe95e3c930324edae0309b#egg=django-sodar-core
# Celery
celery==5.2.7
@@ -102,8 +102,8 @@ pronto==2.5.0 # For OWL conversion
# CUBI ISA-Tab templates
cookiecutter==2.2.3
-cubi-isa-templates==0.1.1
-# -e git+https://github.com/bihealth/cubi-isa-templates.git@c4139e147fd5db10bd6478460edbbce157abe425#egg=cubi-isa-templates
+cubi-isa-templates==0.1.2
+# -e git+https://github.com/bihealth/cubi-isa-templates.git@d3ebe866f3823d81fdb358042ef7ed4d8b8fd100#egg=cubi-isa-templates
# Taskflow requirements
tooz==3.0.0
diff --git a/requirements/ldap.txt b/requirements/ldap.txt
index 5cbb00d99..8960c63dd 100644
--- a/requirements/ldap.txt
+++ b/requirements/ldap.txt
@@ -1,5 +1,5 @@
# Optional LDAP dependencies go here
-r base.txt
-django-auth-ldap==4.1.0
-python-ldap==3.4.3
+django-auth-ldap>=4.6.0, <4.7
+python-ldap>=3.4.4, <3.5
diff --git a/samplesheets/models.py b/samplesheets/models.py
index 96d0c5d06..5d939ffcb 100644
--- a/samplesheets/models.py
+++ b/samplesheets/models.py
@@ -1381,7 +1381,8 @@ def save(self, *args, **kwargs):
def _validate_action(self):
"""Validate the action field"""
- if self.action != IRODS_REQUEST_ACTION_DELETE:
+ # Compare against uppercase string to support legacy requests
+ if self.action.upper() != IRODS_REQUEST_ACTION_DELETE:
raise ValidationError(
'This model currently only supports the action "{}"'.format(
IRODS_REQUEST_ACTION_DELETE
diff --git a/samplesheets/plugins.py b/samplesheets/plugins.py
index 9d81bf585..f93aead43 100644
--- a/samplesheets/plugins.py
+++ b/samplesheets/plugins.py
@@ -213,23 +213,23 @@ class ProjectAppPlugin(
'igv_omit_bam': {
'scope': SODAR_CONSTANTS['APP_SETTING_SCOPE_PROJECT'],
'type': 'STRING',
- 'label': 'BAM files to omit from IGV sessions',
- 'default': '',
- 'placeholder': ', '.join(settings.SHEETS_IGV_OMIT_BAM),
- 'description': 'Comma separated list of BAM file suffixes to omit '
- 'from generated IGV sessions. Overrides site-wide setting, affects '
- 'cancer and germline projects.',
+ 'label': 'BAM and CRAM paths to omit from IGV sessions',
+ 'default': ', '.join(settings.SHEETS_IGV_OMIT_BAM),
+ 'description': 'Comma-separated list of iRODS path glob patterns '
+ 'for omitting BAM and CRAM files from IGV sessions and study '
+ 'shortcuts. Overrides site-wide setting, affects cancer and '
+ 'germline projects. Update sheet cache after updating this value.',
'user_modifiable': True,
},
'igv_omit_vcf': {
'scope': SODAR_CONSTANTS['APP_SETTING_SCOPE_PROJECT'],
'type': 'STRING',
- 'label': 'VCF files to omit from IGV sessions',
- 'default': '',
- 'placeholder': ', '.join(settings.SHEETS_IGV_OMIT_VCF),
- 'description': 'Comma separated list of VCF file suffixes to omit '
- 'from generated IGV sessions. Overrides site-wide setting, affects '
- 'cancer and germline projects.',
+ 'label': 'VCF paths to omit from IGV sessions',
+ 'default': ', '.join(settings.SHEETS_IGV_OMIT_VCF),
+ 'description': 'Comma-separated list of iRODS path glob patterns '
+ 'for omitting VCF files from IGV sessions and study shortcuts. '
+ 'Overrides site-wide setting, affects cancer and germline '
+ 'projects. Update sheet cache after updating this value.',
'user_modifiable': True,
},
}
@@ -360,7 +360,7 @@ def _get_search_files(cls, search_terms, user, irods_backend):
ret = []
try:
with irods_backend.get_session() as irods:
- obj_data = irods_backend.get_objects(
+ obj_list = irods_backend.get_objects(
irods=irods,
path=irods_backend.get_projects_path(),
name_like=search_terms,
@@ -386,7 +386,7 @@ def _get_search_files(cls, search_terms, user, irods_backend):
for a in Assay.objects.filter(study__in=studies.values())
}
- for o in obj_data['irods_data']:
+ for o in obj_list:
project_uuid = irods_backend.get_uuid_from_path(
o['path'], obj_type='project'
)
diff --git a/samplesheets/studyapps/cancer/plugins.py b/samplesheets/studyapps/cancer/plugins.py
index 039acd837..169f6f411 100644
--- a/samplesheets/studyapps/cancer/plugins.py
+++ b/samplesheets/studyapps/cancer/plugins.py
@@ -18,10 +18,7 @@
get_latest_file_path,
)
from samplesheets.studyapps.utils import get_igv_session_url, get_igv_irods_url
-from samplesheets.utils import (
- get_isa_field_name,
- get_last_material_index,
-)
+from samplesheets.utils import get_isa_field_name, get_last_material_index
logger = logging.getLogger(__name__)
@@ -85,8 +82,8 @@ def get_shortcut_column(self, study, study_tables):
'files': {
'type': 'modal',
'icon': 'mdi:folder-open-outline',
- 'title': 'View links to BAM, VCF and IGV session files '
- 'for the case',
+ 'title': 'View links to BAM/CRAM, VCF and IGV session '
+ 'files for the case',
},
},
'data': [],
@@ -172,7 +169,7 @@ def get_shortcut_links(self, study, study_tables, **kwargs):
'title': 'Case-Wise Links for {}'.format(case_id),
'data': {
'session': {'title': 'IGV Session File', 'files': []},
- 'bam': {'title': 'BAM Files', 'files': []},
+ 'bam': {'title': 'BAM/CRAM Files', 'files': []},
'vcf': {'title': 'VCF Files', 'files': []},
},
}
@@ -190,7 +187,7 @@ def _add_lib_path(library_name, file_type):
'extra_links': [
{
'label': 'Add {} file to IGV'.format(
- file_type.upper()
+ 'BAM/CRAM' if file_type == 'bam' else 'VCF'
),
'icon': 'mdi:plus-thick',
'url': get_igv_irods_url(path, merge=True),
diff --git a/samplesheets/studyapps/cancer/tests/test_plugins_taskflow.py b/samplesheets/studyapps/cancer/tests/test_plugins_taskflow.py
index ac4604a9e..8634c0cf9 100644
--- a/samplesheets/studyapps/cancer/tests/test_plugins_taskflow.py
+++ b/samplesheets/studyapps/cancer/tests/test_plugins_taskflow.py
@@ -18,7 +18,7 @@
from samplesheets.tests.test_models import SampleSheetModelMixin
from samplesheets.tests.test_views_taskflow import SampleSheetTaskflowMixin
from samplesheets.plugins import SampleSheetStudyPluginPoint
-from samplesheets.studyapps.germline.utils import get_pedigree_file_path
+from samplesheets.studyapps.cancer.utils import get_library_file_path
from samplesheets.studyapps.utils import get_igv_session_url
@@ -143,6 +143,28 @@ def test_get_shortcut_column_files(self):
self.assertEqual(sc['data'][i]['igv']['enabled'], False)
self.assertEqual(sc['data'][i]['files']['enabled'], False)
+ def test_get_shortcut_column_cram(self):
+ """Test get_shortcut_column() with CRAM file in iRODS"""
+ self.irods.collections.create(self.source_path)
+ cram_path = os.path.join(
+ self.source_path, '{}_test.cram'.format(SAMPLE_ID_NORMAL)
+ )
+ vcf_path = os.path.join(
+ self.source_path, '{}_test.vcf.gz'.format(SAMPLE_ID_NORMAL)
+ )
+ self.irods.data_objects.create(cram_path)
+ self.irods.data_objects.create(vcf_path)
+
+ self.plugin.update_cache(self.cache_name, self.project)
+ study_tables = self.tb.build_study_tables(self.study)
+ sc = self.plugin.get_shortcut_column(self.study, study_tables)
+
+ self.assertEqual(sc['data'][0]['igv']['enabled'], True)
+ self.assertEqual(sc['data'][0]['files']['enabled'], True)
+ for i in range(1, 4):
+ self.assertEqual(sc['data'][i]['igv']['enabled'], False)
+ self.assertEqual(sc['data'][i]['files']['enabled'], False)
+
def test_get_shortcut_links(self):
"""Test get_shortcut_links() with no cache item or files"""
study_tables = self.tb.build_study_tables(self.study)
@@ -197,21 +219,84 @@ def test_get_shortcut_links_files(self):
self.assertEqual(
sl['data']['bam']['files'][0]['url'],
settings.IRODS_WEBDAV_URL
- + get_pedigree_file_path('bam', self.source, study_tables),
+ + get_library_file_path(
+ self.assay,
+ LIBRARY_ID_NORMAL,
+ 'bam',
+ self.irods_backend,
+ self.irods,
+ ),
+ )
+ self.assertEqual(
+ sl['data']['vcf']['files'][0]['url'],
+ settings.IRODS_WEBDAV_URL
+ + get_library_file_path(
+ self.assay,
+ LIBRARY_ID_NORMAL,
+ 'vcf',
+ self.irods_backend,
+ self.irods,
+ ),
+ )
+
+ def test_get_shortcut_links_cram(self):
+ """Test get_shortcut_links() with CRAM file in iRODS"""
+ self.irods.collections.create(self.source_path)
+ cram_path = os.path.join(
+ self.source_path, '{}_test.cram'.format(SAMPLE_ID_NORMAL)
+ )
+ vcf_path = os.path.join(
+ self.source_path, '{}_test.vcf.gz'.format(SAMPLE_ID_NORMAL)
+ )
+ self.irods.data_objects.create(cram_path)
+ self.irods.data_objects.create(vcf_path)
+
+ self.plugin.update_cache(self.cache_name, self.project)
+ study_tables = self.tb.build_study_tables(self.study)
+ sl = self.plugin.get_shortcut_links(
+ self.study, study_tables, case=[SOURCE_ID_NORMAL]
+ )
+
+ self.assertEqual(len(sl['data']['session']['files']), 1)
+ self.assertEqual(len(sl['data']['bam']['files']), 1)
+ self.assertEqual(len(sl['data']['vcf']['files']), 1)
+ self.assertEqual(
+ sl['data']['session']['files'][0]['url'],
+ reverse(
+ 'samplesheets.studyapps.cancer:igv',
+ kwargs={'genericmaterial': self.source.sodar_uuid},
+ ),
+ )
+ self.assertEqual(
+ sl['data']['bam']['files'][0]['url'],
+ settings.IRODS_WEBDAV_URL
+ + get_library_file_path(
+ self.assay,
+ LIBRARY_ID_NORMAL,
+ 'bam',
+ self.irods_backend,
+ self.irods,
+ ),
)
self.assertEqual(
sl['data']['vcf']['files'][0]['url'],
settings.IRODS_WEBDAV_URL
- + get_pedigree_file_path('vcf', self.source, study_tables),
+ + get_library_file_path(
+ self.assay,
+ LIBRARY_ID_NORMAL,
+ 'vcf',
+ self.irods_backend,
+ self.irods,
+ ),
)
def test_get_shortcut_links_multiple(self):
"""Test get_shortcut_links() with multiple BAM/VCF files"""
self.irods.collections.create(self.source_path)
- bam_path = os.path.join(
- self.source_path, '{}_test_2022-11-06.bam'.format(SAMPLE_ID_NORMAL)
+ cram_path = os.path.join(
+ self.source_path, '{}_test_2022-11-06.cram'.format(SAMPLE_ID_NORMAL)
)
- bam_path2 = os.path.join(
+ bam_path = os.path.join(
self.source_path, '{}_test_2022-11-07.bam'.format(SAMPLE_ID_NORMAL)
)
vcf_path = os.path.join(
@@ -222,8 +307,8 @@ def test_get_shortcut_links_multiple(self):
self.source_path,
'{}_test.vcf_2022-11-07.vcf.gz'.format(SAMPLE_ID_NORMAL),
)
+ self.irods.data_objects.create(cram_path)
self.irods.data_objects.create(bam_path)
- self.irods.data_objects.create(bam_path2)
self.irods.data_objects.create(vcf_path)
self.irods.data_objects.create(vcf_path2)
@@ -237,14 +322,12 @@ def test_get_shortcut_links_multiple(self):
# Only the most recent bam and vcf should be returned
self.assertEqual(len(sl['data']['bam']['files']), 1)
self.assertEqual(len(sl['data']['vcf']['files']), 1)
- self.assertTrue(
- sl['data']['bam']['files'][0]['url'].endswith(bam_path2)
- )
+ self.assertTrue(sl['data']['bam']['files'][0]['url'].endswith(bam_path))
self.assertTrue(
sl['data']['vcf']['files'][0]['url'].endswith(vcf_path2)
)
- def test_get_shortcut_links_bam(self):
+ def test_get_shortcut_links_bam_only(self):
"""Test get_shortcut_links() with BAM file only"""
self.irods.collections.create(self.source_path)
bam_path = os.path.join(
@@ -260,7 +343,23 @@ def test_get_shortcut_links_bam(self):
self.assertEqual(len(sl['data']['bam']['files']), 1)
self.assertEqual(len(sl['data']['vcf']['files']), 0)
- def test_get_shortcut_links_vcf(self):
+ def test_get_shortcut_links_cram_only(self):
+ """Test get_shortcut_links() with CRAM file only"""
+ self.irods.collections.create(self.source_path)
+ cram_path = os.path.join(
+ self.source_path, '{}_test.cram'.format(SAMPLE_ID_NORMAL)
+ )
+ self.irods.data_objects.create(cram_path)
+ self.plugin.update_cache(self.cache_name, self.project)
+ study_tables = self.tb.build_study_tables(self.study)
+ sl = self.plugin.get_shortcut_links(
+ self.study, study_tables, case=[SOURCE_ID_NORMAL]
+ )
+ self.assertEqual(len(sl['data']['session']['files']), 1)
+ self.assertEqual(len(sl['data']['bam']['files']), 1)
+ self.assertEqual(len(sl['data']['vcf']['files']), 0)
+
+ def test_get_shortcut_links_vcf_only(self):
"""Test get_shortcut_links() with VCF file only"""
self.irods.collections.create(self.source_path)
vcf_path = os.path.join(
@@ -292,7 +391,7 @@ def test_get_shortcut_links_invalid(self):
self.assertEqual(len(sl['data']['bam']['files']), 0)
self.assertEqual(len(sl['data']['vcf']['files']), 0)
- def test_get_shortcut_links_files_omit(self):
+ def test_get_shortcut_links_omit(self):
"""Test get_shortcut_links() with omittable files in iRODS"""
self.irods.collections.create(self.source_path)
# Create omittable files which come before real ones alphabetically
@@ -332,15 +431,27 @@ def test_get_shortcut_links_files_omit(self):
self.assertEqual(
sl['data']['bam']['files'][0]['url'],
settings.IRODS_WEBDAV_URL
- + get_pedigree_file_path('bam', self.source, study_tables),
+ + get_library_file_path(
+ self.assay,
+ LIBRARY_ID_NORMAL,
+ 'bam',
+ self.irods_backend,
+ self.irods,
+ ),
)
self.assertEqual(
sl['data']['vcf']['files'][0]['url'],
settings.IRODS_WEBDAV_URL
- + get_pedigree_file_path('vcf', self.source, study_tables),
+ + get_library_file_path(
+ self.assay,
+ LIBRARY_ID_NORMAL,
+ 'vcf',
+ self.irods_backend,
+ self.irods,
+ ),
)
- def test_get_shortcut_links_files_omit_only(self):
+ def test_get_shortcut_links_omit_only(self):
"""Test get_shortcut_links() with only omittable files in iRODS"""
self.irods.collections.create(self.source_path)
bam_path_omit = os.path.join(
@@ -360,18 +471,63 @@ def test_get_shortcut_links_files_omit_only(self):
self.assertEqual(len(sl['data']['bam']['files']), 0)
self.assertEqual(len(sl['data']['vcf']['files']), 0)
- def test_get_shortcut_links_files_omit_override(self):
+ def test_get_shortcut_links_omit_only_empty_overrides(self):
+ """Test get_shortcut_links() with empty project overrides"""
+ app_settings.set(
+ 'samplesheets', 'igv_omit_bam', '', project=self.project
+ )
+ app_settings.set(
+ 'samplesheets', 'igv_omit_vcf', '', project=self.project
+ )
+ self.irods.collections.create(self.source_path)
+ bam_path_omit = os.path.join(
+ self.source_path, '{}_dragen_evidence.bam'.format(SAMPLE_ID_NORMAL)
+ )
+ vcf_path_omit = os.path.join(
+ self.source_path, '{}_cnv.vcf.gz'.format(SAMPLE_ID_NORMAL)
+ )
+ self.irods.data_objects.create(bam_path_omit)
+ self.irods.data_objects.create(vcf_path_omit)
+ self.plugin.update_cache(self.cache_name, self.project)
+ study_tables = self.tb.build_study_tables(self.study)
+ sl = self.plugin.get_shortcut_links(
+ self.study, study_tables, case=[SOURCE_ID_NORMAL]
+ )
+ self.assertEqual(len(sl['data']['session']['files']), 1)
+ self.assertEqual(len(sl['data']['bam']['files']), 1)
+ self.assertEqual(len(sl['data']['vcf']['files']), 1)
+
+ def test_get_shortcut_links_omit_only_cram(self):
+ """Test get_shortcut_links() with omittable CRAM file in iRODS"""
+ app_settings.set(
+ 'samplesheets', 'igv_omit_bam', '*omit.cram', project=self.project
+ )
+ self.irods.collections.create(self.source_path)
+ cram_path_omit = os.path.join(
+ self.source_path, '{}_omit.cram'.format(SAMPLE_ID_NORMAL)
+ )
+ self.irods.data_objects.create(cram_path_omit)
+ self.plugin.update_cache(self.cache_name, self.project)
+ study_tables = self.tb.build_study_tables(self.study)
+ sl = self.plugin.get_shortcut_links(
+ self.study, study_tables, case=[SOURCE_ID_NORMAL]
+ )
+ self.assertEqual(len(sl['data']['session']['files']), 0)
+ self.assertEqual(len(sl['data']['bam']['files']), 0)
+ self.assertEqual(len(sl['data']['vcf']['files']), 0)
+
+ def test_get_shortcut_links_omit_override(self):
"""Test get_shortcut_links() with project-specific omit override"""
app_settings.set(
'samplesheets',
'igv_omit_bam',
- 'test.bam, xxx.bam',
+ '*test.bam,*xxx.bam',
project=self.project,
)
app_settings.set(
'samplesheets',
'igv_omit_vcf',
- 'test.vcf.gz, yyy.vcf.gz',
+ '*test.vcf.gz,*yyy.vcf.gz',
project=self.project,
)
self.irods.collections.create(self.source_path)
@@ -425,7 +581,28 @@ def test_update_cache_files(self):
self.assertEqual(ci['bam'][CASE_IDS[i]], None)
self.assertEqual(ci['vcf'][CASE_IDS[i]], None)
- def test_update_cache_files_omit(self):
+ def test_update_cache_cram(self):
+ """Test update_cache() with CRAM file in iRODS"""
+ self.irods.collections.create(self.source_path)
+ cram_path = os.path.join(
+ self.source_path, '{}_test.cram'.format(SAMPLE_ID_NORMAL)
+ )
+ vcf_path = os.path.join(
+ self.source_path, '{}_test.vcf.gz'.format(SAMPLE_ID_NORMAL)
+ )
+ self.irods.data_objects.create(cram_path)
+ self.irods.data_objects.create(vcf_path)
+ self.plugin.update_cache(self.cache_name, self.project)
+ ci = self.cache_backend.get_cache_item(
+ APP_NAME, self.cache_name, self.project
+ ).data
+ self.assertEqual(ci['bam'][CASE_IDS[0]], cram_path)
+ self.assertEqual(ci['vcf'][CASE_IDS[0]], vcf_path)
+ for i in range(1, len(CASE_IDS) - 1):
+ self.assertEqual(ci['bam'][CASE_IDS[i]], None)
+ self.assertEqual(ci['vcf'][CASE_IDS[i]], None)
+
+ def test_update_cache_omit(self):
"""Test update_cache() with omittable files in iRODS"""
self.irods.collections.create(self.source_path)
# Create omittable files which come before real ones alphabetically
@@ -455,10 +632,9 @@ def test_update_cache_files_omit(self):
self.assertEqual(ci['bam'][CASE_IDS[i]], None)
self.assertEqual(ci['vcf'][CASE_IDS[i]], None)
- def test_update_cache_files_omit_only(self):
+ def test_update_cache_omit_only(self):
"""Test update_cache() with only omittable files in iRODS"""
self.irods.collections.create(self.source_path)
- # Create omittable files which come before real ones alphabetically
bam_path_omit = os.path.join(
self.source_path, '{}_dragen_evidence.bam'.format(SAMPLE_ID_NORMAL)
)
@@ -475,18 +651,67 @@ def test_update_cache_files_omit_only(self):
self.assertEqual(ci['bam'][CASE_IDS[i]], None)
self.assertEqual(ci['vcf'][CASE_IDS[i]], None)
- def test_update_cache_files_omit_override(self):
+ def test_update_cache_omit_only_empty_overrides(self):
+ """Test update_cache() with empty project overrides"""
+ app_settings.set(
+ 'samplesheets', 'igv_omit_bam', '', project=self.project
+ )
+ app_settings.set(
+ 'samplesheets', 'igv_omit_vcf', '', project=self.project
+ )
+ self.irods.collections.create(self.source_path)
+ bam_path_omit = os.path.join(
+ self.source_path, '{}_dragen_evidence.bam'.format(SAMPLE_ID_NORMAL)
+ )
+ vcf_path_omit = os.path.join(
+ self.source_path, '{}_cnv.vcf.gz'.format(SAMPLE_ID_NORMAL)
+ )
+ self.irods.data_objects.create(bam_path_omit)
+ self.irods.data_objects.create(vcf_path_omit)
+ self.plugin.update_cache(self.cache_name, self.project)
+ ci = self.cache_backend.get_cache_item(
+ APP_NAME, self.cache_name, self.project
+ ).data
+ self.assertEqual(ci['bam'][CASE_IDS[0]], bam_path_omit)
+ self.assertEqual(ci['vcf'][CASE_IDS[0]], vcf_path_omit)
+ for i in range(1, len(CASE_IDS) - 1):
+ self.assertEqual(ci['bam'][CASE_IDS[i]], None)
+ self.assertEqual(ci['vcf'][CASE_IDS[i]], None)
+
+ def test_update_cache_omit_only_cram(self):
+ """Test update_cache() with omittable CRAM file in iRODS"""
+ app_settings.set(
+ 'samplesheets', 'igv_omit_bam', '*omit.cram', project=self.project
+ )
+ self.irods.collections.create(self.source_path)
+ cram_path_omit = os.path.join(
+ self.source_path, '{}_omit.cram'.format(SAMPLE_ID_NORMAL)
+ )
+ vcf_path_omit = os.path.join(
+ self.source_path, '{}_cnv.vcf.gz'.format(SAMPLE_ID_NORMAL)
+ )
+ self.irods.data_objects.create(cram_path_omit)
+ self.irods.data_objects.create(vcf_path_omit)
+ self.plugin.update_cache(self.cache_name, self.project)
+ ci = self.cache_backend.get_cache_item(
+ APP_NAME, self.cache_name, self.project
+ ).data
+ for i in range(0, len(CASE_IDS) - 1):
+ self.assertEqual(ci['bam'][CASE_IDS[i]], None)
+ self.assertEqual(ci['vcf'][CASE_IDS[i]], None)
+
+ def test_update_cache_omit_override(self):
"""Test update_cache() with project-specific omit override"""
app_settings.set(
'samplesheets',
'igv_omit_bam',
- 'test.bam, xxx.bam',
+ '*test.bam,*xxx.bam',
project=self.project,
)
app_settings.set(
'samplesheets',
'igv_omit_vcf',
- 'test.vcf.gz, yyy.vcf.gz',
+ '*test.vcf.gz,*yyy.vcf.gz',
project=self.project,
)
self.irods.collections.create(self.source_path)
diff --git a/samplesheets/studyapps/cancer/utils.py b/samplesheets/studyapps/cancer/utils.py
index af161f84c..89c9c5361 100644
--- a/samplesheets/studyapps/cancer/utils.py
+++ b/samplesheets/studyapps/cancer/utils.py
@@ -3,9 +3,9 @@
import os
from samplesheets.studyapps.utils import (
- get_igv_omit_override,
- check_igv_file_name,
- FILE_TYPE_SUFFIXES,
+ get_igv_omit_list,
+ check_igv_file_suffix,
+ check_igv_file_path,
)
from samplesheets.utils import get_latest_file_path
@@ -13,11 +13,11 @@
def get_library_file_path(assay, library_name, file_type, irods_backend, irods):
"""
Return iRODS path for the most recent file of type "bam" or "vcf"
- linked to the library.
+ linked to the library. CRAM files are included in "bam" searches.
:param assay: Assay object
:param library_name: Library name (string)
- :param file_type: String ("bam" or "vcf")
+ :param file_type: String ("bam" or "vcf", "bam" is also used for CRAM)
:param irods_backend: IrodsAPI object
:param irods: IRODSSession object
:return: String
@@ -25,13 +25,13 @@ def get_library_file_path(assay, library_name, file_type, irods_backend, irods):
assay_path = irods_backend.get_path(assay)
query_path = os.path.join(assay_path, library_name)
file_paths = []
- override = get_igv_omit_override(assay.get_project(), file_type)
+ omit_list = get_igv_omit_list(assay.get_project(), file_type)
try:
obj_list = irods_backend.get_objects(irods, query_path)
- for obj in obj_list['irods_data']:
- if obj['name'].lower().endswith(
- FILE_TYPE_SUFFIXES[file_type]
- ) and check_igv_file_name(obj['name'], file_type, override):
+ for obj in obj_list:
+ if check_igv_file_suffix(
+ obj['name'].lower(), file_type
+ ) and check_igv_file_path(obj['path'], omit_list):
file_paths.append(obj['path'])
except Exception:
pass
diff --git a/samplesheets/studyapps/germline/plugins.py b/samplesheets/studyapps/germline/plugins.py
index 0655a3ab7..995805c1e 100644
--- a/samplesheets/studyapps/germline/plugins.py
+++ b/samplesheets/studyapps/germline/plugins.py
@@ -13,19 +13,19 @@
from samplesheets.models import Investigation, Study, GenericMaterial
from samplesheets.plugins import SampleSheetStudyPluginPoint
from samplesheets.rendering import SampleSheetTableBuilder
-from samplesheets.studyapps.utils import (
- get_igv_omit_override,
- check_igv_file_name,
- get_igv_session_url,
- get_igv_irods_url,
-)
-from samplesheets.utils import get_index_by_header
-
from samplesheets.studyapps.germline.utils import (
get_pedigree_file_path,
get_families,
get_family_sources,
)
+from samplesheets.studyapps.utils import (
+ get_igv_omit_list,
+ check_igv_file_suffix,
+ check_igv_file_path,
+ get_igv_session_url,
+ get_igv_irods_url,
+)
+from samplesheets.utils import get_index_by_header
logger = logging.getLogger(__name__)
@@ -35,7 +35,6 @@
# SODAR constants
PROJECT_TYPE_PROJECT = SODAR_CONSTANTS['PROJECT_TYPE_PROJECT']
-
# Local constants
APP_NAME = 'samplesheets.studyapps.germline'
@@ -86,8 +85,8 @@ def get_shortcut_column(self, study, study_tables):
'files': {
'type': 'modal',
'icon': 'mdi:folder-open-outline',
- 'title': 'View links to pedigree BAM, VCF and IGV session '
- 'files',
+ 'title': 'View links to pedigree BAM/CRAM, VCF and IGV '
+ 'session files',
},
},
'data': [],
@@ -179,7 +178,7 @@ def get_shortcut_links(self, study, study_tables, **kwargs):
'title': 'Pedigree-Wise Links for {}'.format(query_id),
'data': {
'session': {'title': 'IGV Session File', 'files': []},
- 'bam': {'title': 'BAM Files', 'files': []},
+ 'bam': {'title': 'BAM/CRAM Files', 'files': []},
'vcf': {'title': 'VCF File', 'files': []},
},
}
@@ -197,7 +196,7 @@ def get_shortcut_links(self, study, study_tables, **kwargs):
project=study.get_project(),
)
- # BAM links
+ # BAM/CRAM links
for source in sources:
# Use cached value if present
if cache_item and source.name in cache_item.data['bam']:
@@ -211,10 +210,10 @@ def get_shortcut_links(self, study, study_tables, **kwargs):
{
'label': source.name,
'url': webdav_url + bam_path,
- 'title': 'Download BAM file',
+ 'title': 'Download BAM/CRAM file',
'extra_links': [
{
- 'label': 'Add BAM file to IGV',
+ 'label': 'Add BAM/CRAM file to IGV',
'icon': 'mdi:plus-thick',
'url': get_igv_irods_url(bam_path, merge=True),
}
@@ -310,7 +309,7 @@ def _update_study_cache(cls, study, user, cache_backend, irods_backend):
study_objs = irods_backend.get_objects(
irods, irods_backend.get_path(study)
)
- obj_len = len(study_objs['irods_data'])
+ obj_len = len(study_objs)
logger.debug(
'Query returned {} data object{}'.format(
obj_len, 's' if obj_len != 1 else ''
@@ -322,8 +321,8 @@ def _update_study_cache(cls, study, user, cache_backend, irods_backend):
logger.error('Error querying for study objects: {}'.format(ex))
project = study.get_project()
- bam_override = get_igv_omit_override(project, 'bam')
- vcf_override = get_igv_omit_override(project, 'vcf')
+ bam_omit_list = get_igv_omit_list(project, 'bam')
+ vcf_omit_list = get_igv_omit_list(project, 'vcf')
for assay in study.assays.all():
assay_plugin = assay.get_plugin()
@@ -342,33 +341,32 @@ def _update_study_cache(cls, study, user, cache_backend, irods_backend):
source_name = row[0]['value']
if source_name not in bam_paths:
bam_paths[source_name] = []
- # Add BAM objects
+ # Add BAM/CRAM objects
path = assay_plugin.get_row_path(
row, assay_table, assay, assay_path
)
if obj_len > 0 and path not in bam_paths[source_name]:
bam_paths[source_name] += [
o['path']
- for o in study_objs['irods_data']
+ for o in study_objs
if o['path'].startswith(path + '/')
- and o['name'].lower().endswith('bam')
- and check_igv_file_name(o['name'], 'bam', bam_override)
+ and check_igv_file_suffix(o['name'], 'bam')
+ and check_igv_file_path(o['path'], bam_omit_list)
]
- row_fam = row[fam_idx]['value']
# Add VCF objects
- if row_fam:
- vcf_query_id = row_fam
- else:
+ if fam_idx and row[fam_idx].get('value'):
+ vcf_query_id = row[fam_idx]['value']
+ else: # If family column isn't present/filled, use source name
vcf_query_id = source_name
if vcf_query_id not in vcf_paths:
vcf_paths[vcf_query_id] = []
if obj_len > 0 and path not in vcf_paths[vcf_query_id]:
vcf_paths[vcf_query_id] += [
o['path']
- for o in study_objs['irods_data']
+ for o in study_objs
if o['path'].startswith(path + '/')
and o['name'].lower().endswith('vcf.gz')
- and check_igv_file_name(o['name'], 'vcf', vcf_override)
+ and check_igv_file_path(o['path'], vcf_omit_list)
]
# Update data
diff --git a/samplesheets/studyapps/germline/tests/test_plugins_taskflow.py b/samplesheets/studyapps/germline/tests/test_plugins_taskflow.py
index db4e8f892..ee960a0b7 100644
--- a/samplesheets/studyapps/germline/tests/test_plugins_taskflow.py
+++ b/samplesheets/studyapps/germline/tests/test_plugins_taskflow.py
@@ -13,6 +13,7 @@
# Taskflowbackend dependency
from taskflowbackend.tests.base import TaskflowViewTestBase
+from samplesheets.models import GenericMaterial
from samplesheets.rendering import SampleSheetTableBuilder
from samplesheets.tests.test_io import SampleSheetIOMixin, SHEET_DIR
from samplesheets.tests.test_models import SampleSheetModelMixin
@@ -140,6 +141,29 @@ def test_get_shortcut_column_files(self):
self.assertEqual(sc['data'][i]['igv']['enabled'], False)
self.assertEqual(sc['data'][i]['files']['enabled'], False)
+ def test_get_shortcut_column_cram_file(self):
+ """Test get_shortcut_column() with cache item and CRAM file in iRODS"""
+ self.irods.collections.create(self.source_path)
+ cram_path = os.path.join(
+ self.source_path, '{}_test.cram'.format(SAMPLE_ID)
+ )
+ vcf_path = os.path.join(
+ self.source_path, '{}_test.vcf.gz'.format(FAMILY_ID)
+ )
+ self.irods.data_objects.create(cram_path)
+ self.irods.data_objects.create(vcf_path)
+
+ self.plugin.update_cache(self.cache_name, self.project)
+ study_tables = self.tb.build_study_tables(self.study)
+ sc = self.plugin.get_shortcut_column(self.study, study_tables)
+
+ for i in range(0, 2):
+ self.assertEqual(sc['data'][i]['igv']['enabled'], True)
+ self.assertEqual(sc['data'][i]['files']['enabled'], True)
+ for i in range(3, 5):
+ self.assertEqual(sc['data'][i]['igv']['enabled'], False)
+ self.assertEqual(sc['data'][i]['files']['enabled'], False)
+
def test_get_shortcut_column_parent_vcf(self):
"""Test get_shortcut_column() with VCF file in a parent's collection"""
self.irods.collections.create(self.parent_path)
@@ -266,14 +290,53 @@ def test_get_shortcut_links_files(self):
+ get_pedigree_file_path('vcf', self.source, study_tables),
)
+ def test_get_shortcut_links_cram(self):
+ """Test get_shortcut_links() with CRAM file in iRODS"""
+ self.irods.collections.create(self.source_path)
+ cram_path = os.path.join(
+ self.source_path, '{}_test.cram'.format(SAMPLE_ID)
+ )
+ vcf_path = os.path.join(
+ self.source_path, '{}_test.vcf.gz'.format(FAMILY_ID)
+ )
+ self.irods.data_objects.create(cram_path)
+ self.irods.data_objects.create(vcf_path)
+
+ self.plugin.update_cache(self.cache_name, self.project)
+ study_tables = self.tb.build_study_tables(self.study)
+ sl = self.plugin.get_shortcut_links(
+ self.study, study_tables, family=[FAMILY_ID]
+ )
+
+ self.assertEqual(len(sl['data']['session']['files']), 1)
+ self.assertEqual(len(sl['data']['bam']['files']), 1)
+ self.assertEqual(len(sl['data']['vcf']['files']), 1)
+ self.assertEqual(
+ sl['data']['session']['files'][0]['url'],
+ reverse(
+ 'samplesheets.studyapps.germline:igv',
+ kwargs={'genericmaterial': self.source.sodar_uuid},
+ ),
+ )
+ self.assertEqual(
+ sl['data']['bam']['files'][0]['url'],
+ settings.IRODS_WEBDAV_URL
+ + get_pedigree_file_path('bam', self.source, study_tables),
+ )
+ self.assertEqual(
+ sl['data']['vcf']['files'][0]['url'],
+ settings.IRODS_WEBDAV_URL
+ + get_pedigree_file_path('vcf', self.source, study_tables),
+ )
+
def test_get_shortcut_links_multiple(self):
"""Test get_shortcut_links() with multiple BAM/VCF files"""
self.irods.collections.create(self.source_path)
bam_path = os.path.join(
self.source_path, '{}_test_2022-11-06.bam'.format(SAMPLE_ID)
)
- bam_path2 = os.path.join(
- self.source_path, '{}_test_2022-11-07.bam'.format(SAMPLE_ID)
+ cram_path = os.path.join(
+ self.source_path, '{}_test_2022-11-07.cram'.format(SAMPLE_ID)
)
vcf_path = os.path.join(
self.source_path, '{}_test.vcf_2022-11-06.vcf.gz'.format(FAMILY_ID)
@@ -282,7 +345,7 @@ def test_get_shortcut_links_multiple(self):
self.source_path, '{}_test.vcf_2022-11-07.vcf.gz'.format(FAMILY_ID)
)
self.irods.data_objects.create(bam_path)
- self.irods.data_objects.create(bam_path2)
+ self.irods.data_objects.create(cram_path)
self.irods.data_objects.create(vcf_path)
self.irods.data_objects.create(vcf_path2)
@@ -293,17 +356,17 @@ def test_get_shortcut_links_multiple(self):
)
self.assertEqual(len(sl['data']['session']['files']), 1)
- # Only the most recent bam and vcf should be returned
+ # Only the most recent bam/cram and vcf should be returned
self.assertEqual(len(sl['data']['bam']['files']), 1)
self.assertEqual(len(sl['data']['vcf']['files']), 1)
self.assertTrue(
- sl['data']['bam']['files'][0]['url'].endswith(bam_path2)
+ sl['data']['bam']['files'][0]['url'].endswith(cram_path)
)
self.assertTrue(
sl['data']['vcf']['files'][0]['url'].endswith(vcf_path2)
)
- def test_get_shortcut_links_bam(self):
+ def test_get_shortcut_links_bam_only(self):
"""Test get_shortcut_links() with BAM file only"""
self.irods.collections.create(self.source_path)
bam_path = os.path.join(
@@ -319,7 +382,23 @@ def test_get_shortcut_links_bam(self):
self.assertEqual(len(sl['data']['bam']['files']), 1)
self.assertEqual(len(sl['data']['vcf']['files']), 0)
- def test_get_shortcut_links_vcf(self):
+ def test_get_shortcut_links_cram_only(self):
+ """Test get_shortcut_links() with CRAM file only"""
+ self.irods.collections.create(self.source_path)
+ cram_path = os.path.join(
+ self.source_path, '{}_test.cram'.format(SAMPLE_ID)
+ )
+ self.irods.data_objects.create(cram_path)
+ self.plugin.update_cache(self.cache_name, self.project)
+ study_tables = self.tb.build_study_tables(self.study)
+ sl = self.plugin.get_shortcut_links(
+ self.study, study_tables, family=[FAMILY_ID]
+ )
+ self.assertEqual(len(sl['data']['session']['files']), 1)
+ self.assertEqual(len(sl['data']['bam']['files']), 1)
+ self.assertEqual(len(sl['data']['vcf']['files']), 0)
+
+ def test_get_shortcut_links_vcf_only(self):
"""Test get_shortcut_links() with VCF file only"""
self.irods.collections.create(self.source_path)
vcf_path = os.path.join(
@@ -351,7 +430,7 @@ def test_get_shortcut_links_invalid(self):
self.assertEqual(len(sl['data']['bam']['files']), 0)
self.assertEqual(len(sl['data']['vcf']['files']), 0)
- def test_get_shortcut_links_files_omit(self):
+ def test_get_shortcut_links_omit(self):
"""Test get_shortcut_links() with omittable files in iRODS"""
self.irods.collections.create(self.source_path)
bam_path = os.path.join(
@@ -398,7 +477,7 @@ def test_get_shortcut_links_files_omit(self):
+ get_pedigree_file_path('vcf', self.source, study_tables),
)
- def test_get_shortcut_links_files_omit_only(self):
+ def test_get_shortcut_links_omit_only(self):
"""Test get_shortcut_links() with only omittable files in iRODS"""
self.irods.collections.create(self.source_path)
bam_path_omit = os.path.join(
@@ -419,18 +498,69 @@ def test_get_shortcut_links_files_omit_only(self):
self.assertEqual(len(sl['data']['bam']['files']), 0)
self.assertEqual(len(sl['data']['vcf']['files']), 0)
- def test_get_shortcut_links_files_omit_override(self):
+ def test_get_shortcut_links_omit_only_empty_overrides(self):
+ """Test get_shortcut_links() with empty project overrides"""
+ app_settings.set(
+ 'samplesheets', 'igv_omit_bam', '', project=self.project
+ )
+ app_settings.set(
+ 'samplesheets', 'igv_omit_vcf', '', project=self.project
+ )
+ self.irods.collections.create(self.source_path)
+ bam_path_omit = os.path.join(
+ self.source_path, '{}_dragen_evidence.bam'.format(SAMPLE_ID)
+ )
+ vcf_path_omit = os.path.join(
+ self.source_path, '{}_cnv.vcf.gz'.format(FAMILY_ID)
+ )
+ self.irods.data_objects.create(bam_path_omit)
+ self.irods.data_objects.create(vcf_path_omit)
+
+ self.plugin.update_cache(self.cache_name, self.project)
+ study_tables = self.tb.build_study_tables(self.study)
+ sl = self.plugin.get_shortcut_links(
+ self.study, study_tables, family=[FAMILY_ID]
+ )
+ self.assertEqual(len(sl['data']['session']['files']), 1)
+ self.assertEqual(len(sl['data']['bam']['files']), 1)
+ self.assertEqual(len(sl['data']['vcf']['files']), 1)
+
+ def test_get_shortcut_links_omit_cram(self):
+ """Test get_shortcut_links() with omittable CRAM file in iRODS"""
+ app_settings.set(
+ 'samplesheets', 'igv_omit_bam', '*omit.cram', project=self.project
+ )
+ self.irods.collections.create(self.source_path)
+ cram_path_omit = os.path.join(
+ self.source_path, '{}_omit.cram'.format(SAMPLE_ID)
+ )
+ vcf_path_omit = os.path.join(
+ self.source_path, '{}_cnv.vcf.gz'.format(FAMILY_ID)
+ )
+ self.irods.data_objects.create(cram_path_omit)
+ self.irods.data_objects.create(vcf_path_omit)
+
+ self.plugin.update_cache(self.cache_name, self.project)
+ study_tables = self.tb.build_study_tables(self.study)
+ sl = self.plugin.get_shortcut_links(
+ self.study, study_tables, family=[FAMILY_ID]
+ )
+ self.assertEqual(len(sl['data']['session']['files']), 0)
+ self.assertEqual(len(sl['data']['bam']['files']), 0)
+ self.assertEqual(len(sl['data']['vcf']['files']), 0)
+
+ def test_get_shortcut_links_omit_override(self):
"""Test get_shortcut_links() with project-specific omit override"""
app_settings.set(
'samplesheets',
'igv_omit_bam',
- 'test.bam, xxx.bam',
+ '*test.bam,*xxx.bam',
project=self.project,
)
app_settings.set(
'samplesheets',
'igv_omit_vcf',
- 'test.vcf.gz, yyy.vcf.gz',
+ '*test.vcf.gz,*yyy.vcf.gz',
project=self.project,
)
self.irods.collections.create(self.source_path)
@@ -483,7 +613,26 @@ def test_update_cache_files(self):
self.assertEqual(ci['vcf'][FAMILY_ID], vcf_path)
self.assertIsNone(ci['vcf'][FAMILY_ID2])
- def test_update_cache_files_omit(self):
+ def test_update_cache_cram(self):
+ """Test update_cache() with CRAM file in iRODS"""
+ self.irods.collections.create(self.source_path)
+ cram_path = os.path.join(
+ self.source_path, '{}_test.cram'.format(SAMPLE_ID)
+ )
+ vcf_path = os.path.join(
+ self.source_path, '{}_test.vcf.gz'.format(FAMILY_ID)
+ )
+ self.irods.data_objects.create(cram_path)
+ self.irods.data_objects.create(vcf_path)
+ self.plugin.update_cache(self.cache_name, self.project)
+ ci = self.cache_backend.get_cache_item(
+ APP_NAME, self.cache_name, self.project
+ ).data
+ self.assertEqual(ci['bam'][self.source.name], cram_path)
+ self.assertEqual(ci['vcf'][FAMILY_ID], vcf_path)
+ self.assertIsNone(ci['vcf'][FAMILY_ID2])
+
+ def test_update_cache_omit(self):
"""Test update_cache() with omittable files in iRODS"""
self.irods.collections.create(self.source_path)
# Create omittable files which come before real ones alphabetically
@@ -511,7 +660,7 @@ def test_update_cache_files_omit(self):
self.assertEqual(ci['bam'][self.source.name], bam_path)
self.assertEqual(ci['vcf'][FAMILY_ID], vcf_path)
- def test_update_cache_files_omit_only(self):
+ def test_update_cache_omit_only(self):
"""Test update_cache() with only omittable files in iRODS"""
self.irods.collections.create(self.source_path)
bam_path_omit = os.path.join(
@@ -530,18 +679,64 @@ def test_update_cache_files_omit_only(self):
self.assertIsNone(ci['bam'][self.source.name])
self.assertIsNone(ci['vcf'][FAMILY_ID])
- def test_update_cache_files_omit_override(self):
+ def test_update_cache_omit_only_empty_overrides(self):
+ """Test update_cache() with empty project overrides"""
+ app_settings.set(
+ 'samplesheets', 'igv_omit_bam', '', project=self.project
+ )
+ app_settings.set(
+ 'samplesheets', 'igv_omit_vcf', '', project=self.project
+ )
+ self.irods.collections.create(self.source_path)
+ bam_path_omit = os.path.join(
+ self.source_path, '{}_dragen_evidence.bam'.format(SAMPLE_ID)
+ )
+ vcf_path_omit = os.path.join(
+ self.source_path, '{}_cnv.vcf.gz'.format(FAMILY_ID)
+ )
+ self.irods.data_objects.create(bam_path_omit)
+ self.irods.data_objects.create(vcf_path_omit)
+ self.plugin.update_cache(self.cache_name, self.project)
+ ci = self.cache_backend.get_cache_item(
+ APP_NAME, self.cache_name, self.project
+ ).data
+        # Files should be returned: empty overrides clear the omit lists
+ self.assertEqual(ci['bam'][self.source.name], bam_path_omit)
+ self.assertEqual(ci['vcf'][FAMILY_ID], vcf_path_omit)
+
+ def test_update_cache_omit_only_cram(self):
+ """Test update_cache() with omittable CRAM file in iRODS"""
+ app_settings.set(
+ 'samplesheets', 'igv_omit_bam', '*omit.cram', project=self.project
+ )
+ self.irods.collections.create(self.source_path)
+ cram_path_omit = os.path.join(
+ self.source_path, '{}_omit.cram'.format(SAMPLE_ID)
+ )
+ vcf_path_omit = os.path.join(
+ self.source_path, '{}_cnv.vcf.gz'.format(FAMILY_ID)
+ )
+ self.irods.data_objects.create(cram_path_omit)
+ self.irods.data_objects.create(vcf_path_omit)
+ self.plugin.update_cache(self.cache_name, self.project)
+ ci = self.cache_backend.get_cache_item(
+ APP_NAME, self.cache_name, self.project
+ ).data
+ self.assertIsNone(ci['bam'][self.source.name])
+ self.assertIsNone(ci['vcf'][FAMILY_ID])
+
+ def test_update_cache_omit_override(self):
"""Test update_cache() with project-specific omit override"""
app_settings.set(
'samplesheets',
'igv_omit_bam',
- 'test.bam, xxx.bam',
+ '*test.bam,*xxx.bam',
project=self.project,
)
app_settings.set(
'samplesheets',
'igv_omit_vcf',
- 'test.vcf.gz, yyy.vcf.gz',
+ '*test.vcf.gz,*yyy.vcf.gz',
project=self.project,
)
self.irods.collections.create(self.source_path)
@@ -569,3 +764,28 @@ def test_update_cache_files_omit_override(self):
# Omitted files should not be returned
self.assertEqual(ci['bam'][self.source.name], bam_path_omit)
self.assertEqual(ci['vcf'][FAMILY_ID], vcf_path_omit)
+
+ def test_update_cache_no_family(self):
+ """Test update_cache() with no family characteristic in source"""
+ # Clear the family characteristics and header
+ for m in GenericMaterial.objects.filter(
+ study=self.study, item_type='SOURCE'
+ ):
+ m.characteristics.pop('Family')
+ m.headers.remove('Characteristics[Family]')
+ m.save()
+ self.irods.collections.create(self.source_path)
+ bam_path = os.path.join(
+ self.source_path, '{}_test.bam'.format(SAMPLE_ID)
+ )
+ vcf_path = os.path.join(
+ self.source_path, '{}_test.vcf.gz'.format(SAMPLE_ID)
+ )
+ self.irods.data_objects.create(bam_path)
+ self.irods.data_objects.create(vcf_path)
+ self.plugin.update_cache(self.cache_name, self.project)
+ ci = self.cache_backend.get_cache_item(
+ APP_NAME, self.cache_name, self.project
+ ).data
+ self.assertEqual(ci['bam'][self.source.name], bam_path)
+ self.assertEqual(ci['vcf'][self.source.name], vcf_path)
diff --git a/samplesheets/studyapps/germline/utils.py b/samplesheets/studyapps/germline/utils.py
index b1563fb56..69d4ced65 100644
--- a/samplesheets/studyapps/germline/utils.py
+++ b/samplesheets/studyapps/germline/utils.py
@@ -7,9 +7,9 @@
from samplesheets.models import GenericMaterial
from samplesheets.studyapps.utils import (
- get_igv_omit_override,
- check_igv_file_name,
- FILE_TYPE_SUFFIXES,
+ get_igv_omit_list,
+ check_igv_file_path,
+ check_igv_file_suffix,
)
from samplesheets.utils import get_index_by_header
@@ -22,7 +22,7 @@ def get_pedigree_file_path(file_type, source, study_tables):
Return iRODS path for the most recent file of type "bam" or "vcf"
linked to the source.
- :param file_type: String ("bam" or "vcf")
+ :param file_type: String ("bam" or "vcf", "bam" is also used for CRAM)
:param source: GenericMaterial of type SOURCE
:param study_tables: Render study tables
:return: String
@@ -32,7 +32,7 @@ def get_pedigree_file_path(file_type, source, study_tables):
raise Exception('iRODS Backend not available')
query_paths = []
- override = get_igv_omit_override(source.study.get_project(), file_type)
+ omit_list = get_igv_omit_list(source.study.get_project(), file_type)
def _get_val_by_index(row, idx):
return row[idx]['value'] if idx else None
@@ -81,13 +81,11 @@ def _get_val_by_index(row, idx):
obj_list = None
if obj_list:
for query_path in query_paths:
- for obj in obj_list['irods_data']:
+ for obj in obj_list:
if (
obj['path'].startswith(query_path + '/')
- and obj['name']
- .lower()
- .endswith(FILE_TYPE_SUFFIXES[file_type])
- and check_igv_file_name(obj['name'], file_type, override)
+ and check_igv_file_suffix(obj['name'], file_type)
+ and check_igv_file_path(obj['path'], omit_list)
):
file_paths.append(obj['path'])
logger.debug('Added path: {}'.format(obj['path']))
diff --git a/samplesheets/studyapps/tests/test_utils.py b/samplesheets/studyapps/tests/test_utils.py
index a485d9998..57f1a7726 100644
--- a/samplesheets/studyapps/tests/test_utils.py
+++ b/samplesheets/studyapps/tests/test_utils.py
@@ -14,13 +14,22 @@
from projectroles.tests.test_models import ProjectMixin, RoleAssignmentMixin
from samplesheets.plugins import IGV_DEFAULT_GENOME
-from samplesheets.studyapps.utils import get_igv_xml
+from samplesheets.studyapps.utils import (
+ get_igv_omit_list,
+ check_igv_file_suffix,
+ check_igv_file_path,
+ get_igv_xml,
+)
from samplesheets.tests.test_io import SampleSheetIOMixin
app_settings = AppSettingAPI()
+# Local constants
+INVALID_FILE_TYPE = 'INVALID_TYPE'
+
+
class TestStudyAppUtilsBase(
ProjectMixin, RoleAssignmentMixin, SampleSheetIOMixin, TestCase
):
@@ -41,6 +50,162 @@ def setUp(self):
)
+class TestGetIGVOmitList(TestStudyAppUtilsBase):
+ """Tests for get_igv_omit_list()"""
+
+ def test_get(self):
+ """Test get_igv_omit_list()"""
+ self.assertEqual(
+ get_igv_omit_list(self.project, 'bam'), ['*dragen_evidence.bam']
+ )
+ self.assertEqual(
+ get_igv_omit_list(self.project, 'vcf'),
+ ['*cnv.vcf.gz', '*ploidy.vcf.gz', '*sv.vcf.gz'],
+ )
+
+ def test_get_invalid_file_type(self):
+ """Test get_igv_omit_list() with invalid file type"""
+ with self.assertRaises(ValueError):
+ get_igv_omit_list(self.project, INVALID_FILE_TYPE)
+
+    def test_get_no_preceding_asterisk(self):
+        """Test get_igv_omit_list() without preceding asterisk"""
+ # NOTE: We can't use override_settings here
+ app_settings.set(
+ 'samplesheets',
+ 'igv_omit_bam',
+ 'dragen_evidence.bam',
+ project=self.project,
+ )
+ app_settings.set(
+ 'samplesheets', 'igv_omit_vcf', 'cnv.vcf.gz', project=self.project
+ )
+ self.assertEqual(
+ get_igv_omit_list(self.project, 'bam'), ['*dragen_evidence.bam']
+ )
+ self.assertEqual(
+ get_igv_omit_list(self.project, 'vcf'), ['*cnv.vcf.gz']
+ )
+
+    def test_get_mixed_preceding_asterisks(self):
+        """Test get_igv_omit_list() with mixed preceding asterisks"""
+ app_settings.set(
+ 'samplesheets',
+ 'igv_omit_bam',
+ '*xxx.bam,yyy.bam',
+ project=self.project,
+ )
+ app_settings.set(
+ 'samplesheets',
+ 'igv_omit_vcf',
+ '*xxx.vcf.gz,yyy.vcf.gz',
+ project=self.project,
+ )
+ self.assertEqual(
+ get_igv_omit_list(self.project, 'bam'), ['*xxx.bam', '*yyy.bam']
+ )
+ self.assertEqual(
+ get_igv_omit_list(self.project, 'vcf'),
+ ['*xxx.vcf.gz', '*yyy.vcf.gz'],
+ )
+
+ def test_get_empty_setting(self):
+ """Test get_igv_omit_list() with empty setting values"""
+ app_settings.set(
+ 'samplesheets', 'igv_omit_bam', '', project=self.project
+ )
+ app_settings.set(
+ 'samplesheets', 'igv_omit_vcf', '', project=self.project
+ )
+ self.assertEqual(get_igv_omit_list(self.project, 'bam'), [])
+ self.assertEqual(get_igv_omit_list(self.project, 'vcf'), [])
+
+
+class TestCheckIGVFileSuffix(TestCase): # TestStudyAppUtilsBase not needed
+ """Tests for check_igv_file_suffix()"""
+
+ def test_check_bam(self):
+ """Test checking with BAM type and valid name"""
+ self.assertTrue(check_igv_file_suffix('test.bam', 'bam'))
+
+ def test_check_bam_invalid_name(self):
+ """Test checking with BAM type and invalid name"""
+ self.assertFalse(check_igv_file_suffix('test.vcf', 'bam'))
+
+ def test_check_bam_cram(self):
+ """Test checking with BAM type and CRAM name"""
+ self.assertTrue(check_igv_file_suffix('test.cram', 'bam'))
+
+ def test_check_bam_uppercase(self):
+ """Test checking with BAM type and uppercase name"""
+ self.assertTrue(check_igv_file_suffix('TEST.BAM', 'bam'))
+
+ def test_check_vcf(self):
+ """Test checking with VCF type and valid name"""
+ self.assertTrue(check_igv_file_suffix('test.vcf.gz', 'vcf'))
+
+ def test_check_vcf_no_gz(self):
+ """Test checking with VCF type and name without .gz suffix"""
+ self.assertFalse(check_igv_file_suffix('test.vcf', 'vcf'))
+
+ def test_check_invalid_type(self):
+        """Test checking with invalid file type"""
+ with self.assertRaises(ValueError):
+ check_igv_file_suffix('test.bam', 'INVALID')
+
+
+class TestCheckIGVFilePath(TestCase):
+ """Tests for check_igv_file_path()"""
+
+ def test_path(self):
+ """Test check_igv_file_path()"""
+ path = 'xxx/yyy.bam'
+ omit_list = ['*zzz.bam']
+ self.assertTrue(check_igv_file_path(path, omit_list))
+
+ def test_path_omit_file(self):
+ """Test check_igv_file_path() with file name in omit list"""
+ path = 'xxx/yyy.bam'
+ omit_list = ['*yyy.bam']
+ self.assertFalse(check_igv_file_path(path, omit_list))
+
+ def test_path_empty_list(self):
+ """Test check_igv_file_path() with empty omit list"""
+ path = 'xxx/yyy.bam'
+ omit_list = []
+ self.assertTrue(check_igv_file_path(path, omit_list))
+
+ def test_path_omit_multiple(self):
+ """Test check_igv_file_path() with multiple patterns"""
+ path = 'xxx/yyy.bam'
+ omit_list = ['*yyy.bam', '*zzz.bam']
+ self.assertFalse(check_igv_file_path(path, omit_list))
+
+    def test_path_omit_no_match(self):
+        """Test check_igv_file_path() with multiple non-matching patterns"""
+ path = 'xxx/yyy.bam'
+ omit_list = ['*aaa.bam', '*bbb.bam']
+ self.assertTrue(check_igv_file_path(path, omit_list))
+
+ def test_path_omit_case(self):
+ """Test check_igv_file_path() with file name in different case"""
+ path = 'xxx/YYY.BAM'
+ omit_list = ['*yyy.bam']
+ self.assertFalse(check_igv_file_path(path, omit_list))
+
+ def test_path_omit_collections(self):
+ """Test check_igv_file_path() with matching collections"""
+ path = '000/aaa/bbb/yyy.bam'
+ omit_list = ['*/aaa/bbb/*']
+ self.assertFalse(check_igv_file_path(path, omit_list))
+
+ def test_path_omit_collections_middle(self):
+ """Test check_igv_file_path() with partial collection match"""
+ path = '000/aaa/bbb/yyy.bam'
+ omit_list = ['*/aaa/*/yyy.bam']
+ self.assertFalse(check_igv_file_path(path, omit_list))
+
+
class TestGetIGVXML(TestStudyAppUtilsBase):
"""Tests for get_igv_xml()"""
diff --git a/samplesheets/studyapps/utils.py b/samplesheets/studyapps/utils.py
index cb4868419..cebf5ae26 100644
--- a/samplesheets/studyapps/utils.py
+++ b/samplesheets/studyapps/utils.py
@@ -3,6 +3,7 @@
import hashlib
from lxml import etree as ET
+from pathlib import PurePosixPath
from django.conf import settings
from django.urls import reverse
@@ -17,48 +18,64 @@
# Constants
IGV_URL_BASE = 'http://127.0.0.1:60151'
FILE_TYPE_SUFFIXES = {'bam': '.bam', 'vcf': '.vcf.gz'}
+FILE_TYPE_SUFFIX_CRAM = '.cram' # Special case grouped together with bam
INVALID_TYPE_MSG = 'Invalid value for file_type'
-def get_igv_omit_override(project, file_type):
+def get_igv_omit_list(project, file_type):
"""
- Get IGV omit override setting for a project. NOTE: Added as a separate
- method to avoid redundant database queries.
+ Get list of IGV omit glob patterns for a specific file type in a project.
+ NOTE: Added as a separate method to avoid redundant database queries.
:param project: Project object
- :param file_type: String ("bam" or "vcf")
- :return: List or None
+ :param file_type: String ("bam" or "vcf", "bam" is also used for CRAM)
+ :return: List (appends * in front of each path if missing)
"""
ft = file_type.lower()
if ft not in ['bam', 'vcf']:
raise ValueError(INVALID_TYPE_MSG)
- setting = app_settings.get(
+ setting_val = app_settings.get(
'samplesheets', 'igv_omit_{}'.format(ft), project=project
)
- if setting:
- return [s.strip() for s in setting.split(',')]
- return None
+ if not setting_val:
+ return []
+ return [
+ '{}{}'.format('*' if not s.strip().startswith('*') else '', s.strip())
+ for s in setting_val.split(',')
+ ]
-def check_igv_file_name(file_name, file_type, override=None):
+def check_igv_file_suffix(file_name, file_type):
"""
- Check if file is acceptable for IGV session inclusion. Returns False if
- suffix is found in env vars of omittable files.
+ Check if file name corresponds to the specified file type.
:param file_name: String
- :param file_type: String ("bam" or "vcf")
- :param override: File suffixes to override site-wide setting (list or None)
+ :param file_type: String ("bam" or "vcf", "bam" is also used for CRAM)
:raise: ValueError if file_type is incorrect
- :return: Boolean (True if name is OK)
+ :return: Boolean (True if suffix matches the file type)
"""
- ft = file_type.lower()
- if ft not in ['bam', 'vcf']:
+ if file_type.lower() not in ['bam', 'vcf']:
raise ValueError(INVALID_TYPE_MSG)
- fn = file_name.lower()
- if override:
- return not any([s.lower() for s in override if fn.endswith(s)])
- ol = getattr(settings, 'SHEETS_IGV_OMIT_' + file_type.upper(), [])
- return not any([s.lower() for s in ol if fn.endswith(s)])
+ fn = file_name.lower() # Just in case suffix is in upper case
+ return (
+ fn.endswith(FILE_TYPE_SUFFIXES[file_type])
+ or file_type == 'bam'
+ and fn.endswith(FILE_TYPE_SUFFIX_CRAM)
+ )
+
+
+def check_igv_file_path(path, omit_list):
+ """
+ Check if file path is acceptable for IGV session inclusion. Returns False if
+ pattern is found in IGV omit settings.
+
+ :param path: Full or partial iRODS path (string)
+ :param omit_list: List of path glob patterns to omit (list)
+ :return: Boolean (True if path is OK)
+ """
+ return not any(
+ [p for p in omit_list if PurePosixPath(path.lower()).match(p.lower())]
+ )
def get_igv_session_url(source, app_name, merge=False):
@@ -79,10 +96,7 @@ def get_igv_session_url(source, app_name, merge=False):
'{}:igv'.format(app_name), kwargs={'genericmaterial': source.sodar_uuid}
)
return '{}/load?merge={}&file={}{}.xml'.format(
- IGV_URL_BASE,
- str(merge).lower(),
- file_prefix,
- file_url,
+ IGV_URL_BASE, str(merge).lower(), file_prefix, file_url
)
@@ -104,7 +118,7 @@ def get_igv_xml(project, bam_urls, vcf_urls, vcf_title, request, string=True):
Build IGV session XML file.
:param project: Project object
- :param bam_urls: BAM file URLs (dict {name: url})
+ :param bam_urls: BAM/CRAM file URLs (dict {name: url})
:param vcf_urls: VCF file URLs (dict {name: url})
:param vcf_title: VCF title to prefix to VCF title strings (string)
:param request: Django request
@@ -164,7 +178,7 @@ def get_igv_xml(project, bam_urls, vcf_urls, vcf_title, request, string=True):
'windowFunction': 'count',
},
)
- # BAM panels
+ # BAM/CRAM panels
for bam_name, bam_url in bam_urls.items():
# Generating unique panel name with hashlib
xml_bam_panel = ET.SubElement(
diff --git a/samplesheets/templates/samplesheets/irods_requests.html b/samplesheets/templates/samplesheets/irods_requests.html
index 779e186fc..f8741becf 100644
--- a/samplesheets/templates/samplesheets/irods_requests.html
+++ b/samplesheets/templates/samplesheets/irods_requests.html
@@ -181,7 +181,7 @@ iRODS Delete Requests
Reject Request
{% endif %}
- {% if irods_request.user == request.user %}
+ {% if request.user.is_superuser or irods_request.user == request.user %}
{% if irods_request.status == 'ACTIVE' or irods_request.status == 'FAILED' %}
diff --git a/samplesheets/templates/samplesheets/samplesheet_import_form.html b/samplesheets/templates/samplesheets/samplesheet_import_form.html
index 2b2bec86a..de71ee985 100644
--- a/samplesheets/templates/samplesheets/samplesheet_import_form.html
+++ b/samplesheets/templates/samplesheets/samplesheet_import_form.html
@@ -91,17 +91,11 @@
href="{% url 'samplesheets:project_sheets' project=project.sodar_uuid %}">
Cancel
- {% if replace_sheets %}
-
- {% else %}
-
- {% endif %}
+
diff --git a/samplesheets/tests/edit/i_small_assay_insert_pool.json b/samplesheets/tests/edit/i_small_assay_insert_pool_material.json
similarity index 100%
rename from samplesheets/tests/edit/i_small_assay_insert_pool.json
rename to samplesheets/tests/edit/i_small_assay_insert_pool_material.json
diff --git a/samplesheets/tests/edit/i_small_assay_insert_pool_process.json b/samplesheets/tests/edit/i_small_assay_insert_pool_process.json
new file mode 100644
index 000000000..6bb7de853
--- /dev/null
+++ b/samplesheets/tests/edit/i_small_assay_insert_pool_process.json
@@ -0,0 +1,124 @@
+{
+ "study": "11111111-1111-1111-1111-111111111111",
+ "assay": "22222222-2222-2222-2222-222222222222",
+ "nodes": [
+ {
+ "cells": [
+ {
+ "uuid": "11111111-1111-1111-5555-000000000000",
+ "obj_cls": "GenericMaterial",
+ "value": "0818-N1"
+ }
+ ]
+ },
+ {
+ "cells": [
+ {
+ "uuid": null,
+ "value": "library preparation",
+ "uuid_ref": "22222222-2222-2222-bbbb-000000000000",
+ "header_name": "Protocol",
+ "header_type": "protocol",
+ "header_field": "col10",
+ "obj_cls": "Process"
+ }
+ ],
+ "headers": [
+ "Protocol REF"
+ ]
+ },
+ {
+ "cells": [
+ {
+ "uuid": null,
+ "value": "0818-N1-DNA1",
+ "unit": "",
+ "header_name": "Name",
+ "header_type": "name",
+ "header_field": "col11",
+ "obj_cls": "GenericMaterial",
+ "item_type": "MATERIAL"
+ }
+ ],
+ "headers": [
+ "Library Name"
+ ]
+ },
+ {
+ "cells": [
+ {
+ "uuid": "22222222-2222-2222-2222-000000000003",
+ "value": "nucleic acid sequencing",
+ "obj_cls": "Process"
+ }
+ ]
+ },
+ {
+ "cells": [
+ {
+ "uuid": null,
+ "value": "0818-N1-DNA1-WES1-R1.fastq.qz",
+ "unit": "",
+ "header_name": "Name",
+ "header_type": "name",
+ "header_field": "col14",
+ "obj_cls": "GenericMaterial",
+ "item_type": "DATA"
+ }
+ ],
+ "headers": [
+ "Raw Data File"
+ ]
+ },
+ {
+ "cells": [
+ {
+ "uuid": null,
+ "value": "0818-N1-DNA1-WES1-R2.fastq.qz",
+ "unit": "",
+ "header_name": "Name",
+ "header_type": "name",
+ "header_field": "col15",
+ "obj_cls": "GenericMaterial",
+ "item_type": "DATA"
+ }
+ ],
+ "headers": [
+ "Raw Data File"
+ ]
+ },
+ {
+ "cells": [
+ {
+ "uuid": null,
+ "value": "somatic variant calling-2",
+ "unit": "",
+ "header_name": "Data Transformation Name",
+ "header_type": "process_name",
+ "header_field": "col16",
+ "obj_cls": "Process"
+ }
+ ],
+ "headers": [
+ "Data Transformation Name"
+ ]
+ },
+ {
+ "cells": [
+ {
+ "uuid": null,
+ "value": "new-somatic.vcf.gz",
+ "unit": "",
+ "header_name": "Name",
+ "header_type": "name",
+ "header_field": "col17",
+ "obj_cls": "GenericMaterial",
+ "item_type": "DATA"
+ }
+ ],
+ "headers": [
+ "Derived Data File"
+ ]
+ }
+ ]
+}
\ No newline at end of file
diff --git a/samplesheets/tests/test_models.py b/samplesheets/tests/test_models.py
index 249544e7a..22629a135 100644
--- a/samplesheets/tests/test_models.py
+++ b/samplesheets/tests/test_models.py
@@ -1602,12 +1602,17 @@ def test__repr__(self):
)
self.assertEqual(repr(self.request), expected)
- def test_validate_action(self):
- """Test _validate_action()"""
+ def test_validate_action_move(self):
+ """Test _validate_action() with MOVE status"""
with self.assertRaises(ValidationError):
self.request.action = 'MOVE'
self.request.save()
+ def test_validate_action_legacy(self):
+ """Test _validate_action() with legacy lowercase status"""
+ self.request.action = 'delete'
+ self.request.save() # No exception
+
def test_validate_status(self):
"""Test _validate_status()"""
with self.assertRaises(ValidationError):
diff --git a/samplesheets/tests/test_utils.py b/samplesheets/tests/test_utils.py
index c103fa17e..86bb60fab 100644
--- a/samplesheets/tests/test_utils.py
+++ b/samplesheets/tests/test_utils.py
@@ -27,6 +27,7 @@
compare_inv_replace,
get_webdav_url,
get_ext_link_labels,
+ get_latest_file_path,
)
from samplesheets.tests.test_io import (
SampleSheetIOMixin,
@@ -52,6 +53,9 @@
]
IRODS_TICKET_STR = 'ooChaa1t'
EXT_LINK_PATH_INVALID = '/tmp/NON_EXISTING_EXT_LINK_FILE.json'
+BAM_PATH = '/sodarZone/coll_z/file_2023-02-28.bam'
+BAM_PATH2 = '/sodarZone/coll_x/file_2023-03-01.bam'
+CRAM_PATH = '/sodarZone/coll_y/file_2023-02-29.cram'
class SamplesheetsUtilsTestBase(
@@ -289,6 +293,20 @@ def test_get(self):
@override_settings(SHEETS_EXTERNAL_LINK_PATH=EXT_LINK_PATH_INVALID)
def test_get_default(self):
- """Test retrievint default labels"""
+ """Test retrieving default labels"""
labels = get_ext_link_labels()
self.assertEqual(labels, DEFAULT_EXTERNAL_LINK_LABELS)
+
+
+class TestGetLatestFilePath(SamplesheetsUtilsTestBase):
+ """Tests for get_latest_file_path()"""
+
+ def test_get_bam(self):
+ """Test retrieval with two BAM paths"""
+ self.assertEqual(get_latest_file_path([BAM_PATH, BAM_PATH2]), BAM_PATH2)
+
+ def test_get_mixed(self):
+ """Test retrieval with mixed BAM and CRAM paths"""
+ self.assertEqual(
+ get_latest_file_path([BAM_PATH, BAM_PATH2, CRAM_PATH]), BAM_PATH2
+ )
diff --git a/samplesheets/tests/test_views.py b/samplesheets/tests/test_views.py
index 989c398d9..65fec757c 100644
--- a/samplesheets/tests/test_views.py
+++ b/samplesheets/tests/test_views.py
@@ -120,6 +120,11 @@
EDIT_NEW_VALUE_STR = 'edited value'
DUMMY_UUID = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
IRODS_FILE_PATH = '/sodarZone/path/test1.txt'
+TPL_FILE_NAME_FIELDS = [
+ 'investigation_title',
+ 'investigation_id',
+ 'study_title',
+]
with open(CONFIG_PATH_DEFAULT) as fp:
CONFIG_DATA_DEFAULT = json.load(fp)
@@ -796,6 +801,30 @@ def test_post_batch(self):
)
self.project.investigations.first().delete()
+ def test_post_batch_file_name_slash(self):
+ """Test POST with slashes in values used for file names"""
+ for t in ISA_TEMPLATES:
+ self.assertIsNone(self.project.investigations.first())
+ post_data = self._get_post_data(t)
+ for k in TPL_FILE_NAME_FIELDS:
+ if k in post_data:
+ post_data[k] += '/test'
+ with self.login(self.user):
+ response = self.client.post(
+ reverse(
+ 'samplesheets:template_create',
+ kwargs={'project': self.project.sodar_uuid},
+ )
+ + '?sheet_tpl='
+ + t.name,
+ data=post_data,
+ )
+ self.assertEqual(response.status_code, 302, msg=t.name)
+ self.assertIsNotNone(
+ self.project.investigations.first(), msg=t.name
+ )
+ self.project.investigations.first().delete()
+
def test_post_multiple(self):
"""Test multiple requests to add multiple sample sheets (should fail)"""
tpl = ISA_TEMPLATES[0]
@@ -1552,10 +1581,10 @@ class TestIrodsDataRequestListView(
):
"""Tests for IrodsDataRequestListView"""
- def test_list(self):
- """Test GET request for listing delete requests"""
+ def test_get(self):
+ """Test IrodsDataRequestListView GET"""
self.assertEqual(IrodsDataRequest.objects.count(), 0)
- request = self.make_irods_request(
+ irods_request = self.make_irods_request(
project=self.project,
action=IRODS_REQUEST_ACTION_DELETE,
path=IRODS_FILE_PATH,
@@ -1571,9 +1600,14 @@ def test_list(self):
)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context['object_list']), 1)
- self.assertEqual(response.context['object_list'][0], request)
+ context_obj = response.context['object_list'][0]
+ self.assertEqual(context_obj, irods_request)
+ self.assertEqual(
+ context_obj.webdav_url, 'https://127.0.0.1' + IRODS_FILE_PATH
+ ) # Ensure no extra slash is between host and iRODS path
+ self.assertEqual(context_obj.is_collection, False)
- def test_list_as_contributor_by_superuser(self):
+ def test_get_as_contributor_by_superuser(self):
"""Test GET as contibutor with request created by superuser"""
self.make_irods_request(
project=self.project,
@@ -1592,7 +1626,7 @@ def test_list_as_contributor_by_superuser(self):
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context['object_list']), 0)
- def test_list_as_contributor2_by_contributor(self):
+ def test_get_as_contributor2_by_contributor(self):
"""Test GET as contributor2 with request created by contributor"""
user_contributor2 = self.make_user('user_contributor2')
self.make_assignment(
diff --git a/samplesheets/tests/test_views_ajax.py b/samplesheets/tests/test_views_ajax.py
index 156c6c495..2a7a8ba05 100644
--- a/samplesheets/tests/test_views_ajax.py
+++ b/samplesheets/tests/test_views_ajax.py
@@ -76,7 +76,12 @@
STUDY_INSERT_PATH = EDIT_DIR + 'i_small_study_insert.json'
ASSAY_INSERT_PATH = EDIT_DIR + 'i_small_assay_insert.json'
ASSAY_INSERT_SPLIT_PATH = EDIT_DIR + 'i_small_assay_insert_split.json'
-ASSAY_INSERT_POOL_PATH = EDIT_DIR + 'i_small_assay_insert_pool.json'
+ASSAY_INSERT_POOL_MATERIAL_PATH = os.path.join(
+ EDIT_DIR, 'i_small_assay_insert_pool_material.json'
+)
+ASSAY_INSERT_POOL_PROCESS_PATH = os.path.join(
+ EDIT_DIR, 'i_small_assay_insert_pool_process.json'
+)
STUDY_DELETE_PATH = EDIT_DIR + 'i_small_study_delete.json'
ASSAY_DELETE_PATH = EDIT_DIR + 'i_small_assay_delete.json'
EDIT_SOURCE_NAME = '0818'
@@ -184,7 +189,7 @@ def setUp(self):
self.assay = self.study.assays.first()
def test_get(self):
- """Test GET for context retrieval with sample sheets"""
+ """Test SheetContextAjaxView GET"""
with self.login(self.user):
response = self.client.get(
reverse(
@@ -289,8 +294,8 @@ def test_get(self):
}
self.assertEqual(response_data, expected)
- def test_no_sheets(self):
- """Test context retrieval without sample sheets"""
+ def test_get_no_sheets(self):
+ """Test GET without sample sheets"""
self.investigation.active = False
self.investigation.save()
@@ -345,7 +350,7 @@ def test_no_sheets(self):
# TODO: Test anonymous request and irods_webdav_enabled
- def test_delegate_min_owner(self):
+ def test_get_delegate_min_owner(self):
"""Test GET as delegate with owner minimum role"""
app_settings.set(
APP_NAME,
@@ -363,7 +368,7 @@ def test_delegate_min_owner(self):
self.assertEqual(response.status_code, 200)
self.assertFalse(json.loads(response.json())['perms']['edit_config'])
- def test_delegate_min_delegate(self):
+ def test_get_delegate_min_delegate(self):
"""Test GET as delegate with delegate minimum role"""
app_settings.set(
APP_NAME,
@@ -381,7 +386,7 @@ def test_delegate_min_delegate(self):
self.assertEqual(response.status_code, 200)
self.assertTrue(json.loads(response.json())['perms']['edit_config'])
- def test_irods_request_alert_owner(self):
+ def test_get_irods_request_alert_owner(self):
"""Test GET with active iRODS request alert as owner"""
self.investigation.irods_status = True
self.investigation.save()
@@ -413,7 +418,7 @@ def test_irods_request_alert_owner(self):
),
)
- def test_irods_request_alert_contributor(self):
+ def test_get_irods_request_alert_contributor(self):
"""Test GET with active iRODS request alert as contributor"""
self.investigation.irods_status = True
self.investigation.save()
@@ -440,7 +445,7 @@ def test_irods_request_alert_contributor(self):
response_data = json.loads(response.data)
self.assertEqual(len(response_data['alerts']), 0)
- def test_inherited_owner(self):
+ def test_get_inherited_owner(self):
"""Test GET as inherited owner"""
# Set up category owner
user_cat = self.make_user('user_cat')
@@ -477,7 +482,7 @@ def setUp(self):
self.cache_args = [APP_NAME, self.cache_name, self.project]
def test_get(self):
- """Test study tables retrieval"""
+ """Test StudyTablesAjaxView GET"""
with self.login(self.user):
response = self.client.get(
reverse(
@@ -513,7 +518,7 @@ def test_get(self):
self.assertNotIn('edit_context', ret_data)
def test_get_edit(self):
- """Test study tables retrieval with edit mode enabled"""
+ """Test GET with edit mode enabled"""
with self.login(self.user):
response = self.client.get(
reverse(
@@ -549,7 +554,7 @@ def test_get_edit(self):
self.assertIsNotNone(ret_data['edit_context']['protocols'])
def test_get_study_cache(self):
- """Test cached study table creation on retrieval"""
+ """Test GET for cached study table creation on retrieval"""
self.assertIsNone(self.cache_backend.get_cache_item(*self.cache_args))
self.assertEqual(JSONCacheItem.objects.count(), 0)
with self.login(self.user):
@@ -566,7 +571,7 @@ def test_get_study_cache(self):
self.assertEqual(JSONCacheItem.objects.count(), 1)
def test_get_study_cache_existing(self):
- """Test retrieval with existing cached study tables"""
+ """Test GET with existing cached study tables"""
study_tables = table_builder.build_study_tables(self.study)
cache_name = STUDY_TABLE_CACHE_ITEM.format(study=self.study.sodar_uuid)
self.cache_backend.set_cache_item(
@@ -602,7 +607,7 @@ def setUp(self):
self.assay = self.study.assays.first()
def test_get(self):
- """Test study links retrieval without plugin"""
+ """Test StudyLinksAjaxView GET without plugin"""
with self.login(self.user):
response = self.client.get(
reverse(
@@ -623,7 +628,7 @@ def setUp(self):
self.investigation = self.import_isa_from_file(SHEET_PATH, self.project)
def test_get(self):
- """Test study tables retrieval"""
+ """Test SheetWarningsAjaxView GET"""
with self.login(self.user):
response = self.client.get(
reverse(
@@ -640,19 +645,6 @@ def test_get(self):
class TestSheetCellEditAjaxView(SamplesheetsViewTestBase):
"""Tests for SheetCellEditAjaxView"""
- def setUp(self):
- super().setUp()
- self.investigation = self.import_isa_from_file(SHEET_PATH, self.project)
- self.study = self.investigation.studies.first()
- # Set up POST data
- self.values = {'updated_cells': []}
- # Set up helpers
- self.cache_backend = get_backend_api('sodar_cache')
- self.cache_name = STUDY_TABLE_CACHE_ITEM.format(
- study=self.study.sodar_uuid
- )
- self.cache_args = [APP_NAME, self.cache_name, self.project]
-
@classmethod
def _convert_ontology_value(cls, value):
"""
@@ -668,8 +660,21 @@ def _convert_ontology_value(cls, value):
value['unit'] = None
return value
- def test_edit_name(self):
- """Test editing the name of a material"""
+ def setUp(self):
+ super().setUp()
+ self.investigation = self.import_isa_from_file(SHEET_PATH, self.project)
+ self.study = self.investigation.studies.first()
+ # Set up POST data
+ self.values = {'updated_cells': []}
+ # Set up helpers
+ self.cache_backend = get_backend_api('sodar_cache')
+ self.cache_name = STUDY_TABLE_CACHE_ITEM.format(
+ study=self.study.sodar_uuid
+ )
+ self.cache_args = [APP_NAME, self.cache_name, self.project]
+
+ def test_post_material_name(self):
+ """Test SheetCellEditAjaxView POST with material name"""
obj = GenericMaterial.objects.get(study=self.study, name='0816')
new_name = '0816aaa'
self.values['updated_cells'].append(
@@ -694,8 +699,8 @@ def test_edit_name(self):
obj.refresh_from_db()
self.assertEqual(obj.name, new_name)
- def test_edit_name_empty(self):
- """Test setting an empty material name (should fail)"""
+ def test_post_material_name_empty(self):
+ """Test POST with empty material name (should fail)"""
obj = GenericMaterial.objects.get(study=self.study, name='0816')
self.values['updated_cells'].append(
{
@@ -719,8 +724,8 @@ def test_edit_name_empty(self):
obj.refresh_from_db()
self.assertEqual(obj.name, '0816')
- def test_edit_performer(self):
- """Test editing the performer of a process"""
+ def test_post_performer(self):
+ """Test POST with process performer"""
obj = Process.objects.filter(study=self.study, assay=None).first()
value = 'Alice Example '
self.values['updated_cells'].append(
@@ -745,8 +750,8 @@ def test_edit_performer(self):
obj.refresh_from_db()
self.assertEqual(obj.performer, value)
- def test_edit_perform_date(self):
- """Test editing the perform date of a process"""
+ def test_post_perform_date(self):
+ """Test POST with process perform date"""
obj = Process.objects.filter(study=self.study, assay=None).first()
value = '2020-07-07'
self.values['updated_cells'].append(
@@ -771,8 +776,8 @@ def test_edit_perform_date(self):
obj.refresh_from_db()
self.assertEqual(obj.perform_date.strftime('%Y-%m-%d'), value)
- def test_edit_perform_date_empty(self):
- """Test editing the perform date of a process with an empty date"""
+ def test_post_perform_date_empty(self):
+ """Test POST with empty process perform date"""
obj = Process.objects.filter(study=self.study, assay=None).first()
value = ''
self.values['updated_cells'].append(
@@ -797,8 +802,8 @@ def test_edit_perform_date_empty(self):
obj.refresh_from_db()
self.assertIsNone(obj.perform_date)
- def test_edit_perform_date_invalid(self):
- """Test editing the perform date of a process with an invalid date"""
+ def test_post_perform_date_invalid(self):
+ """Test POST with invalid perform date (should fail)"""
obj = Process.objects.filter(study=self.study, assay=None).first()
og_date = obj.perform_date
value = '2020-11-31'
@@ -824,8 +829,8 @@ def test_edit_perform_date_invalid(self):
obj.refresh_from_db()
self.assertEqual(obj.perform_date, og_date)
- def test_edit_characteristics_str(self):
- """Test editing a characteristics string value in a material"""
+ def test_post_characteristics_str(self):
+ """Test POST with material characteristics string value"""
obj = GenericMaterial.objects.get(study=self.study, name='0816')
header_name = 'organism'
self.assertNotEqual(
@@ -858,8 +863,8 @@ def test_edit_characteristics_str(self):
{'unit': None, 'value': EDIT_NEW_VALUE_STR},
)
- def test_edit_param_values_str(self):
- """Test editing a parameter values string value in a process"""
+ def test_post_param_values_str(self):
+ """Test POST with process parameter values string value"""
obj = Process.objects.filter(study=self.study, assay=None).first()
header_name = 'instrument'
self.assertNotEqual(
@@ -892,8 +897,8 @@ def test_edit_param_values_str(self):
{'unit': None, 'value': EDIT_NEW_VALUE_STR},
)
- def test_edit_protocol(self):
- """Test editing the protocol reference of a process"""
+ def test_post_protocol(self):
+ """Test POST with process protocol reference"""
obj = Process.objects.filter(
study=self.study, unique_name__icontains='sample collection'
).first()
@@ -926,8 +931,8 @@ def test_edit_protocol(self):
obj.refresh_from_db()
self.assertEqual(obj.protocol, new_protocol)
- def test_edit_process_name(self):
- """Test editing the name of a process"""
+ def test_post_process_name(self):
+ """Test POST with process name"""
obj = Process.objects.filter(
study=self.study, unique_name__icontains='sample collection'
).first()
@@ -960,8 +965,8 @@ def test_edit_process_name(self):
self.assertEqual(obj.name, name)
self.assertEqual(obj.name_type, name_type)
- def test_edit_ontology_term(self):
- """Test editing an ontology term with a single term"""
+ def test_post_ontology_term(self):
+ """Test POST with single ontology term"""
obj = GenericMaterial.objects.get(study=self.study, name='0817')
name = 'organism'
self.assertEqual(obj.characteristics[name], EMPTY_ONTOLOGY_VAL)
@@ -1000,8 +1005,8 @@ def test_edit_ontology_term(self):
obj.characteristics[name], self._convert_ontology_value(value)
)
- def test_edit_ontology_term_replace(self):
- """Test replacing an existing ontology term"""
+ def test_post_ontology_term_replace(self):
+ """Test POST to replace existing ontology term"""
obj = GenericMaterial.objects.get(study=self.study, name='0815')
name = 'organism'
og_value = {
@@ -1047,8 +1052,8 @@ def test_edit_ontology_term_replace(self):
obj.characteristics[name], self._convert_ontology_value(value)
)
- def test_edit_ontology_term_list(self):
- """Test editing an ontology term with a list of terms"""
+ def test_post_ontology_term_list(self):
+ """Test POST with list of ontology terms"""
obj = GenericMaterial.objects.get(study=self.study, name='0817')
name = 'organism'
value = [
@@ -1093,8 +1098,8 @@ def test_edit_ontology_term_list(self):
obj.characteristics[name], self._convert_ontology_value(value)
)
- def test_edit_ontology_term_empty(self):
- """Test editing an ontology term with an empty value"""
+ def test_post_ontology_term_empty(self):
+ """Test POST with empty ontology term value"""
obj = GenericMaterial.objects.get(study=self.study, name='0815')
name = 'organism'
og_value = {
@@ -1128,8 +1133,8 @@ def test_edit_ontology_term_empty(self):
obj.refresh_from_db()
self.assertEqual(obj.characteristics[name], EMPTY_ONTOLOGY_VAL)
- def test_edit_ontology_term_ref(self):
- """Test ontology source ref updating when editing term value"""
+ def test_post_ontology_term_ref(self):
+ """Test POST for ontology source ref updating when editing term value"""
# Set up ontology
obo_doc = fastobo.load(OBO_PATH)
ontology = OBOFormatOntologyIO().import_obo(
@@ -1203,8 +1208,8 @@ def test_edit_ontology_term_ref(self):
}
self.assertEqual(ref, expected)
- def test_edit_study_cache(self):
- """Test editing with cached study tables"""
+ def test_post_study_cache(self):
+ """Test POST with cached study tables"""
# Create cache item
study_tables = table_builder.build_study_tables(self.study)
cache_name = STUDY_TABLE_CACHE_ITEM.format(study=self.study.sodar_uuid)
@@ -1258,8 +1263,8 @@ def setUp(self):
self.study = self.investigation.studies.first()
self.values = {'updated_cells': []}
- def test_edit_extract_label_string(self):
- """Test updating the extract label field with a string value"""
+ def test_post_extract_label_string(self):
+ """Test SheetCellEditAjaxView POST with extract label and string value"""
label = 'New label'
name_type = th.LABELED_EXTRACT_NAME
obj = GenericMaterial.objects.get(
@@ -1316,8 +1321,8 @@ def setUp(self):
)
self.cache_args = [APP_NAME, self.cache_name, self.project]
- def test_insert_study_row(self):
- """Test inserting a new row into a study"""
+ def test_post_study_row(self):
+ """Test SheetRowInsertAjaxView POST with study row"""
self.assertIsNone(
GenericMaterial.objects.filter(
study=self.study, name='0818'
@@ -1378,8 +1383,8 @@ def test_insert_study_row(self):
self.assertEqual(sample.characteristics['status']['value'], '0')
self.assertEqual(sample.factor_values['treatment']['value'], 'yes')
- def test_insert_assay_row(self):
- """Test inserting a new row into an assay"""
+ def test_post_assay_row(self):
+ """Test POST with assay row"""
# Insert study row
response = self.insert_row(path=STUDY_INSERT_PATH)
self.assertEqual(response.status_code, 200)
@@ -1430,8 +1435,8 @@ def test_insert_assay_row(self):
for i in range(len(node_names) - 1):
self.assertIn([node_names[i], node_names[i + 1]], self.assay.arcs)
- def test_insert_assay_row_split(self):
- """Test inserting a row with splitting into an assay"""
+ def test_post_assay_row_split(self):
+ """Test POST with assay row and splitting"""
# Insert study and assay rows
self.insert_row(path=STUDY_INSERT_PATH)
sample = GenericMaterial.objects.get(
@@ -1458,8 +1463,8 @@ def test_insert_assay_row_split(self):
)
self.assertEqual(len(self.assay.arcs), arc_count + 1)
- def test_insert_assay_row_pool(self):
- """Test inserting a row with pooling into an assay"""
+ def test_post_assay_row_pool_material(self):
+ """Test POST with assay row and pooling by data file material"""
self.insert_row(path=STUDY_INSERT_PATH)
sample = GenericMaterial.objects.get(
study=self.study, name=EDIT_SAMPLE_NAME
@@ -1472,7 +1477,7 @@ def test_insert_assay_row_pool(self):
mat_count = GenericMaterial.objects.filter(assay=self.assay).count()
proc_count = Process.objects.filter(assay=self.assay).count()
arc_count = len(self.assay.arcs)
- response = self.insert_row(path=ASSAY_INSERT_POOL_PATH)
+ response = self.insert_row(path=ASSAY_INSERT_POOL_MATERIAL_PATH)
self.assertEqual(response.status_code, 200)
self.assay.refresh_from_db()
@@ -1485,10 +1490,37 @@ def test_insert_assay_row_pool(self):
)
self.assertEqual(len(self.assay.arcs), arc_count + 7)
+ def test_post_assay_row_pool_process(self):
+ """Test POST with assay row and pooling by named process"""
+ self.insert_row(path=STUDY_INSERT_PATH)
+ sample = GenericMaterial.objects.get(
+ study=self.study, name=EDIT_SAMPLE_NAME
+ )
+ sample.sodar_uuid = EDIT_SAMPLE_UUID
+ sample.save()
+ self.insert_row(path=ASSAY_INSERT_PATH)
+
+ self.update_assay_row_uuids(update_sample=False)
+ mat_count = GenericMaterial.objects.filter(assay=self.assay).count()
+ proc_count = Process.objects.filter(assay=self.assay).count()
+ arc_count = len(self.assay.arcs)
+ response = self.insert_row(path=ASSAY_INSERT_POOL_PROCESS_PATH)
+
+ self.assertEqual(response.status_code, 200)
+ self.assay.refresh_from_db()
+ self.assertEqual(
+ GenericMaterial.objects.filter(assay=self.assay).count(),
+ mat_count + 4,
+ )
+ self.assertEqual(
+ Process.objects.filter(assay=self.assay).count(), proc_count + 1
+ )
+ self.assertEqual(len(self.assay.arcs), arc_count + 7)
+
# TODO: Test ontology ref updating
- def test_insert_study_cache(self):
- """Test inserting new row with cached study tables"""
+ def test_post_study_cache(self):
+ """Test POST with cached study tables"""
study_tables = table_builder.build_study_tables(self.study)
cache_name = STUDY_TABLE_CACHE_ITEM.format(study=self.study.sodar_uuid)
self.cache_backend.set_cache_item(
@@ -1552,8 +1584,8 @@ def setUp(self):
)
self.cache_args = [APP_NAME, self.cache_name, self.project]
- def test_delete_assay_row(self):
- """Test row deletion from assay"""
+ def test_post_assay_row(self):
+ """Test SheetRowDeleteAjaxView POST with assay row"""
response = self.delete_row(ASSAY_DELETE_PATH)
self.assertEqual(response.status_code, 200)
self.assay.refresh_from_db()
@@ -1568,8 +1600,8 @@ def test_delete_assay_row(self):
for n in self.row_names:
self.assertNotIn(n, target_nodes)
- def test_delete_study_row(self):
- """Test row deletion from study"""
+ def test_post_study_row(self):
+ """Test POST with study row"""
# First delete the assay row
response = self.delete_row(ASSAY_DELETE_PATH)
self.assertEqual(response.status_code, 200)
@@ -1593,8 +1625,8 @@ def test_delete_study_row(self):
).first()
)
- def test_delete_study_row_in_use(self):
- """Test study row deletion with sample used in asssay (should fail)"""
+ def test_post_study_row_in_use(self):
+ """Test POST with study row containing sample used in asssay (should fail)"""
response = self.delete_row(STUDY_DELETE_PATH)
self.assertEqual(response.status_code, 500)
self.assertIsNotNone(
@@ -1615,8 +1647,8 @@ def test_delete_study_row_in_use(self):
# TODO: Test deletion with splitting/pooling
- def test_delete_assay_row_study_cache(self):
- """Test row deletion from assay with cached study tables"""
+ def test_post_assay_row_study_cache(self):
+ """Test POST with assay row and cached study tables"""
study_tables = table_builder.build_study_tables(self.study)
cache_name = STUDY_TABLE_CACHE_ITEM.format(study=self.study.sodar_uuid)
self.cache_backend.set_cache_item(
@@ -1771,8 +1803,8 @@ def setUp(self):
)
self.cache_args = [APP_NAME, self.cache_name, self.project]
- def test_update_study_column(self):
- """Test posting a study column update"""
+ def test_post_study_column(self):
+ """Test SheetEditConfigAjaxView POST with study column"""
sheet_config = app_settings.get(
APP_NAME, 'sheet_config', project=self.project
)
@@ -1823,8 +1855,8 @@ def test_update_study_column(self):
1,
)
- def test_superuser_min_owner(self):
- """Test updating as superuser with minimum role of owner"""
+ def test_post_superuser_min_owner(self):
+ """Test POST as superuser with minimum role of owner"""
edit_config_min_role = app_settings.get(
APP_NAME, 'edit_config_min_role', project=self.project
)
@@ -1842,8 +1874,8 @@ def test_superuser_min_owner(self):
)
self.assertEqual(response.status_code, 200)
- def test_owner_min_owner(self):
- """Test updating as owner with minimum=owner"""
+ def test_post_owner_min_owner(self):
+ """Test POST as owner with minimum=owner"""
edit_config_min_role = app_settings.get(
APP_NAME, 'edit_config_min_role', project=self.project
)
@@ -1861,8 +1893,8 @@ def test_owner_min_owner(self):
)
self.assertEqual(response.status_code, 200)
- def test_delegate_min_owner(self):
- """Test updating as delegate with minimum=owner (should fail)"""
+ def test_post_delegate_min_owner(self):
+ """Test POST as delegate with minimum=owner (should fail)"""
edit_config_min_role = app_settings.get(
APP_NAME, 'edit_config_min_role', project=self.project
)
@@ -1884,8 +1916,8 @@ def test_delegate_min_owner(self):
'User not allowed to modify column config',
)
- def test_contributor_min_owner(self):
- """Test updating as contributor with minimum=owner (should fail)"""
+ def test_post_contributor_min_owner(self):
+ """Test POST as contributor with minimum=owner (should fail)"""
edit_config_min_role = app_settings.get(
APP_NAME, 'edit_config_min_role', project=self.project
)
@@ -1907,8 +1939,8 @@ def test_contributor_min_owner(self):
'User not allowed to modify column config',
)
- def test_inherited_owner(self):
- """Test updating as inherited owner"""
+ def test_post_inherited_owner(self):
+ """Test POST as inherited owner"""
with self.login(self.user_cat):
response = self.client.post(
reverse(
@@ -1920,8 +1952,8 @@ def test_inherited_owner(self):
)
self.assertEqual(response.status_code, 200)
- def test_update_study_cache(self):
- """Test posting a study column update with cached study tables"""
+ def test_post_study_cache(self):
+ """Test POST with study column and cached study tables"""
study_tables = table_builder.build_study_tables(self.study)
cache_name = STUDY_TABLE_CACHE_ITEM.format(study=self.study.sodar_uuid)
self.cache_backend.set_cache_item(
@@ -1983,7 +2015,7 @@ def setUp(self):
self.study_config = self.display_config['studies'][self.s_uuid]
def test_post(self):
- """Test updating the sheet configuration"""
+ """Test StudyDisplayConfigAjaxView POST"""
self.assertEqual(
self.study_config['nodes'][0]['fields'][2]['visible'], True
)
@@ -2013,7 +2045,7 @@ def test_post(self):
)
def test_post_default(self):
- """Test updating along with the default configuration"""
+ """Test POST with default configuration"""
self.assertEqual(
self.study_config['nodes'][0]['fields'][2]['visible'], True
)
@@ -2048,7 +2080,7 @@ def test_post_default(self):
self.assertEqual(updated_config, default_config)
def test_post_no_change(self):
- """Test posting with no updates"""
+ """Test POST with no updates"""
with self.login(self.user):
response = self.client.post(
reverse(
@@ -2099,7 +2131,7 @@ def setUp(self):
self.isa2.save()
def test_get(self):
- """Test GET returning diff data"""
+ """Test SheetVersionCompareAjaxView GET returning diff data"""
expected = {
'studies': {
's_small2.txt': [
diff --git a/samplesheets/tests/test_views_api_taskflow.py b/samplesheets/tests/test_views_api_taskflow.py
index 80c19ff4c..da68980ab 100644
--- a/samplesheets/tests/test_views_api_taskflow.py
+++ b/samplesheets/tests/test_views_api_taskflow.py
@@ -928,6 +928,20 @@ def test_patch(self):
self.assertEqual(self.request.path, self.obj_path)
self._assert_tl_count(1)
+ def test_patch_superuser(self):
+ """Test PATCH as superuser"""
+ self._assert_tl_count(0)
+ update_data = {'description': IRODS_REQUEST_DESC_UPDATED}
+ response = self.request_knox(
+ self.url, 'PATCH', data=update_data, token=self.get_token(self.user)
+ )
+ self.assertEqual(response.status_code, 200)
+ self.request.refresh_from_db()
+ self.assertEqual(self.request.description, IRODS_REQUEST_DESC_UPDATED)
+ self.assertEqual(self.request.path, self.obj_path)
+ self.assertEqual(self.request.user, self.user_contributor)
+ self._assert_tl_count(1)
+
# NOTE: For TestIrodsDataRequestDestroyAPIView, see test_views_api
diff --git a/samplesheets/tests/test_views_taskflow.py b/samplesheets/tests/test_views_taskflow.py
index c8cfc757d..3671a7f35 100644
--- a/samplesheets/tests/test_views_taskflow.py
+++ b/samplesheets/tests/test_views_taskflow.py
@@ -20,6 +20,7 @@
from projectroles.app_settings import AppSettingAPI
from projectroles.models import SODAR_CONSTANTS
from projectroles.plugins import get_backend_api
+from projectroles.views import MSG_NO_AUTH
# Appalerts dependency
from appalerts.models import AppAlert
@@ -1160,10 +1161,10 @@ def setUp(self):
class TestIrodsDataRequestCreateView(IrodsDataRequestViewTestBase):
- """Test IrodsDataRequestCreateView"""
+ """Tests for IrodsDataRequestCreateView"""
- def test_create(self):
- """Test creating a delete request"""
+ def test_post(self):
+ """Test IrodsDataRequestCreateView POST"""
self.assertEqual(IrodsDataRequest.objects.count(), 0)
self._assert_tl_count(EVENT_CREATE, 0)
self._assert_alert_count(EVENT_CREATE, self.user, 0)
@@ -1194,11 +1195,19 @@ def test_create(self):
self.assertEqual(obj.path, self.obj_path)
self.assertEqual(obj.description, IRODS_REQUEST_DESC)
self._assert_tl_count(EVENT_CREATE, 1)
+ self.assertEqual(
+ ProjectEvent.objects.get(event_name=EVENT_CREATE).extra_data,
+ {
+ 'action': IRODS_REQUEST_ACTION_DELETE,
+ 'path': obj.path,
+ 'description': obj.description,
+ },
+ )
self._assert_alert_count(EVENT_CREATE, self.user, 1)
self._assert_alert_count(EVENT_CREATE, self.user_delegate, 1)
- def test_create_trailing_slash(self):
- """Test creating a delete request with trailing slash in path"""
+ def test_post_trailing_slash(self):
+ """Test POST with trailing slash in path"""
self.assertEqual(IrodsDataRequest.objects.count(), 0)
post_data = {
'path': self.obj_path + '/',
@@ -1230,8 +1239,8 @@ def test_create_trailing_slash(self):
self.assertEqual(obj.path, self.obj_path)
self.assertEqual(obj.description, IRODS_REQUEST_DESC)
- def test_create_invalid_form_data(self):
- """Test creating a delete request with invalid form data"""
+ def test_post_invalid_data(self):
+ """Test POST with invalid form data"""
self.assertEqual(IrodsDataRequest.objects.count(), 0)
self._assert_tl_count(EVENT_CREATE, 0)
self._assert_alert_count(EVENT_CREATE, self.user, 0)
@@ -1256,8 +1265,8 @@ def test_create_invalid_form_data(self):
self._assert_alert_count(EVENT_CREATE, self.user, 0)
self._assert_alert_count(EVENT_CREATE, self.user_delegate, 0)
- def test_create_invalid_path_assay_collection(self):
- """Test creating a delete request with assay path (should fail)"""
+ def test_post_assay_path(self):
+ """Test POST with assay path (should fail)"""
self.assertEqual(IrodsDataRequest.objects.count(), 0)
post_data = {'path': self.assay_path, 'description': IRODS_REQUEST_DESC}
@@ -1277,7 +1286,7 @@ def test_create_invalid_path_assay_collection(self):
self.assertEqual(IrodsDataRequest.objects.count(), 0)
def test_create_multiple(self):
- """Test creating multiple_requests"""
+ """Test POST with multiple requests for same path"""
path2 = os.path.join(self.assay_path, IRODS_FILE_NAME2)
self.irods.data_objects.create(path2)
self.assertEqual(IrodsDataRequest.objects.count(), 0)
@@ -1323,9 +1332,13 @@ def setUp(self):
user=self.user_contributor,
description=IRODS_REQUEST_DESC,
)
+ self.url = reverse(
+ 'samplesheets:irods_request_update',
+ kwargs={'irodsdatarequest': self.request.sodar_uuid},
+ )
- def test_update(self):
- """Test POST request for updating a delete request"""
+ def test_post(self):
+ """Test IrodsDataRequestUpdateView POST"""
self.assertEqual(IrodsDataRequest.objects.count(), 1)
self._assert_tl_count(EVENT_UPDATE, 0)
@@ -1334,13 +1347,7 @@ def test_update(self):
'description': IRODS_REQUEST_DESC_UPDATE,
}
with self.login(self.user_contributor):
- response = self.client.post(
- reverse(
- 'samplesheets:irods_request_update',
- kwargs={'irodsdatarequest': self.request.sodar_uuid},
- ),
- update_data,
- )
+ response = self.client.post(self.url, update_data)
self.assertRedirects(
response,
reverse(
@@ -1360,9 +1367,37 @@ def test_update(self):
self.assertEqual(self.request.path, self.obj_path)
self.assertEqual(self.request.description, IRODS_REQUEST_DESC_UPDATE)
self._assert_tl_count(EVENT_UPDATE, 1)
+ self.assertEqual(
+ ProjectEvent.objects.get(event_name=EVENT_UPDATE).extra_data,
+ {
+ 'action': IRODS_REQUEST_ACTION_DELETE,
+ 'path': self.request.path,
+ 'description': self.request.description,
+ },
+ )
- def test_post_update_invalid_form_data(self):
- """Test updating a delete request with invalid form data"""
+ def test_post_superuser(self):
+ """Test POST as superuser"""
+ self.assertEqual(IrodsDataRequest.objects.count(), 1)
+ self._assert_tl_count(EVENT_UPDATE, 0)
+
+ update_data = {
+ 'path': self.obj_path,
+ 'description': IRODS_REQUEST_DESC_UPDATE,
+ }
+ with self.login(self.user):
+ self.client.post(self.url, update_data)
+
+ self.assertEqual(IrodsDataRequest.objects.count(), 1)
+ self.request.refresh_from_db()
+ self.assertEqual(self.request.path, self.obj_path)
+ self.assertEqual(self.request.description, IRODS_REQUEST_DESC_UPDATE)
+ # Assert user is not updated when superuser updates the request
+ self.assertEqual(self.request.user, self.user_contributor)
+ self._assert_tl_count(EVENT_UPDATE, 1)
+
+ def test_post_invalid_form_data(self):
+ """Test POST with invalid form data"""
self.assertEqual(IrodsDataRequest.objects.count(), 1)
self._assert_tl_count(EVENT_UPDATE, 0)
@@ -1371,13 +1406,7 @@ def test_post_update_invalid_form_data(self):
'description': IRODS_REQUEST_DESC_UPDATE,
}
with self.login(self.user_contributor):
- response = self.client.post(
- reverse(
- 'samplesheets:irods_request_update',
- kwargs={'irodsdatarequest': self.request.sodar_uuid},
- ),
- update_data,
- )
+ response = self.client.post(self.url, update_data)
self.assertEqual(
response.context['form'].errors['path'][0],
@@ -1393,7 +1422,7 @@ def test_post_update_invalid_form_data(self):
class TestIrodsDataRequestDeleteView(
IrodsDataRequestMixin, IrodsDataRequestViewTestBase
):
- """Tests for IrodsDataRequestUpdateView"""
+ """Tests for IrodsDataRequestDeleteView"""
def setUp(self):
super().setUp()
@@ -1402,8 +1431,8 @@ def setUp(self):
kwargs={'project': self.project.sodar_uuid},
)
- def test_delete_contributor(self):
- """Test POST request for deleting a request"""
+ def test_post(self):
+ """Test IrodsDataRequestDeleteView POST"""
# NOTE: We use post() to ensure alerts are created
with self.login(self.user_contributor):
self.client.post(self.create_url, self.post_data)
@@ -1435,13 +1464,16 @@ def test_delete_contributor(self):
)
self.assertEqual(IrodsDataRequest.objects.count(), 0)
self.assert_irods_obj(self.obj_path)
- # Create requests should be deleted
self._assert_tl_count(EVENT_DELETE, 1)
+ self.assertEqual(
+ ProjectEvent.objects.get(event_name=EVENT_DELETE).extra_data, {}
+ )
+ # Create alerts should be deleted
self._assert_alert_count(EVENT_CREATE, self.user, 0)
self._assert_alert_count(EVENT_CREATE, self.user_delegate, 0)
- def test_delete_one_of_multiple(self):
- """Test deleting one of multiple requests"""
+ def test_post_one_of_multiple(self):
+ """Test POST for one of multiple requests"""
self._assert_tl_count(EVENT_DELETE, 0)
obj_path2 = os.path.join(self.assay_path, IRODS_FILE_NAME2)
self.irods.data_objects.create(obj_path2)
@@ -1487,8 +1519,8 @@ def setUp(self):
kwargs={'project': self.project.sodar_uuid},
)
- def test_render(self):
- """Test rendering IrodsDataRequestAcceptView"""
+ def test_get(self):
+ """Test IrodsDataRequestAcceptView GET"""
self.assert_irods_obj(self.obj_path)
self.make_irods_request(
project=self.project,
@@ -1510,8 +1542,8 @@ def test_render(self):
)
self.assertEqual(response.context['request_objects'][0], obj)
- def test_render_coll(self):
- """Test rendering IrodsDataRequestAcceptView with collection request"""
+ def test_get_coll(self):
+ """Test GET with collection request"""
coll_path = os.path.join(self.assay_path, 'request_coll')
self.irods.collections.create(coll_path)
self.assertEqual(self.irods.collections.exists(coll_path), True)
@@ -1534,8 +1566,8 @@ def test_render_coll(self):
)
self.assertEqual(response.context['request_objects'][0], request)
- def test_accept(self):
- """Test accepting a delete request"""
+ def test_post(self):
+ """Test POST to accept delete request"""
self.assert_irods_obj(self.obj_path)
with self.login(self.user_contributor):
self.client.post(self.create_url, self.post_data)
@@ -1576,8 +1608,8 @@ def test_accept(self):
self._assert_alert_count(EVENT_ACCEPT, self.user_delegate, 0)
self.assert_irods_obj(self.obj_path, False)
- def test_accept_no_request(self):
- """Test accepting non-existing delete request"""
+ def test_post_no_request(self):
+ """Test POST with non-existing delete request"""
self.assertEqual(IrodsDataRequest.objects.count(), 0)
with self.login(self.user_owner_cat):
response = self.client.post(
@@ -1589,8 +1621,8 @@ def test_accept_no_request(self):
)
self.assertEqual(response.status_code, 404)
- def test_accept_invalid_form_data(self):
- """Test accepting delete request with invalid form data"""
+ def test_post_invalid_data(self):
+ """Test POST with invalid form data"""
self.assert_irods_obj(self.obj_path)
with self.login(self.user_contributor):
self.client.post(self.create_url, self.post_data)
@@ -1623,8 +1655,8 @@ def test_accept_invalid_form_data(self):
self._assert_alert_count(EVENT_ACCEPT, self.user_delegate, 0)
self.assert_irods_obj(self.obj_path)
- def test_accept_owner(self):
- """Test accepting delete request as owner"""
+ def test_post_as_owner(self):
+ """Test POST as owner"""
self.assert_irods_obj(self.obj_path)
request = self.make_irods_request(
project=self.project,
@@ -1653,8 +1685,8 @@ def test_accept_owner(self):
self._assert_alert_count(EVENT_ACCEPT, self.user_delegate, 0)
self._assert_alert_count(EVENT_ACCEPT, self.user_contributor, 1)
- def test_accept_delegate(self):
- """Test accepting delete request as delegate"""
+ def test_post_delegate(self):
+ """Test POST as delegate"""
self.assert_irods_obj(self.obj_path)
request = self.make_irods_request(
project=self.project,
@@ -1683,8 +1715,8 @@ def test_accept_delegate(self):
self._assert_alert_count(EVENT_ACCEPT, self.user_delegate, 0)
self._assert_alert_count(EVENT_ACCEPT, self.user_contributor, 1)
- def test_accept_contributor(self):
- """Test accepting delete request as contributor"""
+ def test_post_contributor(self):
+ """Test POST as contributor"""
self.assert_irods_obj(self.obj_path)
request = self.make_irods_request(
project=self.project,
@@ -1714,8 +1746,8 @@ def test_accept_contributor(self):
self._assert_alert_count(EVENT_ACCEPT, self.user_delegate, 0)
self._assert_alert_count(EVENT_ACCEPT, self.user_contributor, 0)
- def test_accept_one_of_multiple(self):
- """Test accepting one of multiple requests"""
+ def test_post_one_of_multiple(self):
+ """Test POST for one of multiple requests"""
obj_path2 = os.path.join(self.assay_path, IRODS_FILE_NAME2)
self.irods.data_objects.create(obj_path2)
self.assert_irods_obj(self.obj_path)
@@ -1759,8 +1791,8 @@ def test_accept_one_of_multiple(self):
self.assert_irods_obj(obj_path2, False)
@override_settings(REDIS_URL=INVALID_REDIS_URL)
- def test_accept_lock_failure(self):
- """Test accepting a delete request with project lock failure"""
+ def test_post_lock_failure(self):
+ """Test POST with project lock failure"""
self.assert_irods_obj(self.obj_path)
with self.login(self.user_contributor):
self.client.post(self.create_url, self.post_data)
@@ -1791,8 +1823,8 @@ def test_accept_lock_failure(self):
self._assert_alert_count(EVENT_CREATE, self.user_delegate, 1)
self.assert_irods_obj(self.obj_path, True)
- def test_accept_collection(self):
- """Test accepting a collection request with multiple objects inside"""
+ def test_post_collection(self):
+ """Test POST with multiple objects in collection"""
coll_path = os.path.join(self.assay_path, 'request_coll')
obj_path2 = os.path.join(coll_path, IRODS_FILE_NAME)
self.irods.collections.create(coll_path)
@@ -1845,8 +1877,8 @@ def setUp(self):
kwargs={'project': self.project.sodar_uuid},
)
- def test_render(self):
- """Test rendering IrodsDataRequestAcceptBatchView"""
+ def test_get(self):
+ """Test IrodsDataRequestAcceptBatchView GET"""
self.assert_irods_obj(self.obj_path)
self.make_irods_request(
project=self.project,
@@ -1898,8 +1930,8 @@ def test_render(self):
IrodsDataRequest.objects.last(),
)
- def test_render_coll(self):
- """Test rendering IrodsDataRequestAcceptBatchView for a collection"""
+ def test_get_coll(self):
+ """Test GET with collection"""
coll_path = os.path.join(self.assay_path, 'request_coll')
self.irods.collections.create(coll_path)
self.assertEqual(self.irods.collections.exists(coll_path), True)
@@ -1933,8 +1965,8 @@ def test_render_coll(self):
response.context['affected_object_paths'][0], coll_path
)
- def test_accept(self):
- """Test accepting a delete request"""
+ def test_post(self):
+ """Test POST to accept delete requests"""
self.assert_irods_obj(self.obj_path)
self.assert_irods_obj(self.obj_path2)
@@ -2001,8 +2033,8 @@ def test_accept(self):
self.assert_irods_obj(self.obj_path, False)
self.assert_irods_obj(self.obj_path2, False)
- def test_accept_no_request(self):
- """Test accepting a delete request that doesn't exist"""
+ def test_post_no_request(self):
+ """Test POST with non-existing delete request"""
self.assertEqual(IrodsDataRequest.objects.count(), 0)
with self.login(self.user_owner_cat):
response = self.client.post(
@@ -2018,8 +2050,8 @@ def test_accept_no_request(self):
NO_REQUEST_MSG,
)
- def test_accept_invalid_form_data(self):
- """Test accepting a delete request with invalid form data"""
+ def test_post_invalid_data(self):
+ """Test POST with invalid form data"""
self.assert_irods_obj(self.obj_path)
self.assert_irods_obj(self.obj_path2)
@@ -2061,8 +2093,8 @@ def test_accept_invalid_form_data(self):
self.assert_irods_obj(self.obj_path2)
@override_settings(REDIS_URL=INVALID_REDIS_URL)
- def test_accept_lock_failure(self):
- """Test accepting a delete request with redis lock failure"""
+ def test_post_lock_failure(self):
+ """Test POST with project lock failure"""
self.assert_irods_obj(self.obj_path)
self.assert_irods_obj(self.obj_path2)
@@ -2122,8 +2154,8 @@ def setUp(self):
kwargs={'project': self.project.sodar_uuid},
)
- def test_reject_admin(self):
- """Test rejecting delete request as admin"""
+ def test_get_admin(self):
+ """Test IrodsDataRequestRejectView GET as admin"""
self.assertEqual(IrodsDataRequest.objects.count(), 0)
self.assert_irods_obj(self.obj_path)
self._assert_alert_count(EVENT_REJECT, self.user, 0)
@@ -2165,8 +2197,8 @@ def test_reject_admin(self):
self._assert_alert_count(EVENT_REJECT, self.user_delegate, 0)
self._assert_alert_count(EVENT_REJECT, self.user_contributor, 1)
- def test_reject_owner(self):
- """Test rejecting delete request as owner"""
+ def test_get_owner(self):
+ """Test GET as owner"""
self.assertEqual(IrodsDataRequest.objects.count(), 0)
request = self.make_irods_request(
project=self.project,
@@ -2190,8 +2222,8 @@ def test_reject_owner(self):
self._assert_alert_count(EVENT_REJECT, self.user_delegate, 0)
self._assert_alert_count(EVENT_REJECT, self.user_contributor, 1)
- def test_reject_delegate(self):
- """Test rejecting delete request as delegate"""
+ def test_get_delegate(self):
+ """Test GET as delegate"""
self.assertEqual(IrodsDataRequest.objects.count(), 0)
request = self.make_irods_request(
project=self.project,
@@ -2215,8 +2247,8 @@ def test_reject_delegate(self):
self._assert_alert_count(EVENT_REJECT, self.user_delegate, 0)
self._assert_alert_count(EVENT_REJECT, self.user_contributor, 1)
- def test_reject_contributor(self):
- """Test rejecting delete request as contributor"""
+ def test_get_contributor(self):
+ """Test GET as contributor"""
self.assertEqual(IrodsDataRequest.objects.count(), 0)
request = self.make_irods_request(
project=self.project,
@@ -2236,8 +2268,7 @@ def test_reject_contributor(self):
self.assertRedirects(response, reverse('home'))
self.assertEqual(
- list(get_messages(response.wsgi_request))[-1].message,
- 'User not authorized for requested action.',
+ list(get_messages(response.wsgi_request))[-1].message, MSG_NO_AUTH
)
request.refresh_from_db()
self.assertEqual(request.status, IRODS_REQUEST_STATUS_ACTIVE)
@@ -2245,8 +2276,8 @@ def test_reject_contributor(self):
self._assert_alert_count(EVENT_REJECT, self.user_delegate, 0)
self._assert_alert_count(EVENT_REJECT, self.user_contributor, 0)
- def test_reject_one_of_multiple(self):
- """Test rejecting one of multiple requests"""
+ def test_get_one_of_multiple(self):
+ """Test GET with one of multiple requests"""
obj_path2 = os.path.join(self.assay_path, IRODS_FILE_NAME2)
self.irods.data_objects.create(obj_path2)
self.assert_irods_obj(self.obj_path)
@@ -2287,8 +2318,8 @@ def test_reject_one_of_multiple(self):
self._assert_alert_count(EVENT_REJECT, self.user_delegate, 0)
self._assert_alert_count(EVENT_REJECT, self.user_contributor, 1)
- def test_reject_no_request(self):
- """Test rejecting delete request that doesn't exist"""
+ def test_get_no_request(self):
+ """Test GET with non-existing delete request"""
with self.login(self.user_owner_cat):
response = self.client.get(
reverse(
@@ -2315,8 +2346,8 @@ def setUp(self):
kwargs={'project': self.project.sodar_uuid},
)
- def test_reject(self):
- """Test rejecting delete request"""
+ def test_post(self):
+ """Test IrodsDataRequestRejectBatchView POST"""
self.assertEqual(IrodsDataRequest.objects.count(), 0)
self._assert_alert_count(EVENT_REJECT, self.user, 0)
self._assert_alert_count(EVENT_REJECT, self.user_delegate, 0)
@@ -2366,8 +2397,8 @@ def test_reject(self):
self._assert_alert_count(EVENT_REJECT, self.user_delegate, 0)
self._assert_alert_count(EVENT_REJECT, self.user_contributor, 0)
- def test_reject_no_request(self):
- """Test rejecting delete request that doesn't exist"""
+ def test_post_no_request(self):
+ """Test POST with non-existing request"""
with self.login(self.user_owner_cat):
response = self.client.post(
self.reject_url,
diff --git a/samplesheets/views.py b/samplesheets/views.py
index 55a03e468..272724130 100644
--- a/samplesheets/views.py
+++ b/samplesheets/views.py
@@ -12,6 +12,7 @@
from cubi_isa_templates import _TEMPLATES as ISA_TEMPLATES
from irods.exception import CollectionDoesNotExist
from packaging import version
+from urllib.parse import urljoin
from django.conf import settings
from django.contrib import messages
@@ -685,15 +686,10 @@ def create_colls(self, investigation, request=None, sync=False):
'ticket_str': ticket_str,
}
taskflow.submit(
- project=project,
- flow_name='sheet_colls_create',
- flow_data=flow_data,
+ project=project, flow_name='sheet_colls_create', flow_data=flow_data
)
app_settings.set(
- APP_NAME,
- 'public_access_ticket',
- ticket_str,
- project=project,
+ APP_NAME, 'public_access_ticket', ticket_str, project=project
)
if tl_event:
tl_event.set_status('OK')
@@ -793,43 +789,30 @@ def __init__(self):
# Timeline helpers ---------------------------------------------------------
- def add_tl_create(self, irods_request):
- """
- Create timeline event for iRODS data request creation.
-
- :param irods_request: IrodsDataRequest object
- """
- if not self.timeline:
- return
- tl_event = self.timeline.add_event(
- project=irods_request.project,
- app_name=APP_NAME,
- user=irods_request.user,
- event_name=IRODS_REQUEST_EVENT_CREATE,
- description='create iRODS data request {irods_request}',
- status_type='OK',
- )
- tl_event.add_object(
- obj=irods_request,
- label='irods_request',
- name=irods_request.get_display_name(),
- )
-
- def add_tl_update(self, irods_request):
+ def add_tl_event(self, irods_request, action):
"""
- Create timeline event for iRODS data request update.
+ Create timeline event for iRODS data request modification.
:param irods_request: IrodsDataRequest object
+ :param action: 'create', 'update' or 'delete' (string)
"""
if not self.timeline:
return
+ extra_data = {}
+ if action in ['create', 'update']:
+ extra_data = {
+ 'action': irods_request.action,
+ 'path': irods_request.path,
+ 'description': irods_request.description,
+ }
tl_event = self.timeline.add_event(
project=irods_request.project,
app_name=APP_NAME,
user=irods_request.user,
- event_name=IRODS_REQUEST_EVENT_UPDATE,
- description='update iRODS data request {irods_request}',
+ event_name='irods_request_{}'.format(action),
+ description=action + ' iRODS data request {irods_request}',
status_type='OK',
+ extra_data=extra_data,
)
tl_event.add_object(
obj=irods_request,
@@ -837,28 +820,6 @@ def add_tl_update(self, irods_request):
name=irods_request.get_display_name(),
)
- def add_tl_delete(self, irods_request):
- """
- Create timeline event for iRODS data request deletion.
-
- :param irods_request: IrodsDataRequest object
- """
- if not self.timeline:
- return
- tl_event = self.timeline.add_event(
- project=irods_request.project,
- app_name=APP_NAME,
- user=irods_request.user,
- event_name=IRODS_REQUEST_EVENT_DELETE,
- description='delete iRODS data request {irods_request}',
- status_type='OK',
- )
- tl_event.add_object(
- obj=irods_request,
- label='irods_request',
- name=str(irods_request),
- )
-
# App Alert Helpers --------------------------------------------------------
def add_alerts_create(self, project, app_alerts=None):
@@ -1160,6 +1121,21 @@ def get_irods_request_objects(self):
return IrodsDataRequest.objects.none()
return IrodsDataRequest.objects.filter(sodar_uuid__in=request_ids)
+ def add_error_message(self, obj, ex):
+ """
+ Add Django error message for iRODS data request exception.
+
+ :param obj: IrodsDataRequest object
+ :param ex: Exception
+ """
+ ex_msg = '; '.join(ex) if isinstance(ex, list) else str(ex)
+ messages.error(
+ self.request,
+ 'Accepting iRODS data request "{}" failed: {}'.format(
+ obj.get_display_name(), ex_msg
+ ),
+ )
+
class SheetRemoteSyncAPI(SheetImportMixin):
"""
@@ -1491,12 +1467,8 @@ def get_context_data(self, *args, **kwargs):
def get_form_kwargs(self):
"""Pass kwargs to form"""
kwargs = super().get_form_kwargs()
- kwargs.update(
- {
- 'project': self.get_project(),
- 'sheet_tpl': self._get_sheet_template(),
- }
- )
+ kwargs['project'] = self.get_project()
+ kwargs['sheet_tpl'] = self._get_sheet_template()
return kwargs
def form_valid(self, form):
@@ -1763,9 +1735,7 @@ def post(self, request, *args, **kwargs):
tl_event.set_status('SUBMIT')
try:
taskflow.submit(
- project=project,
- flow_name='sheet_delete',
- flow_data={},
+ project=project, flow_name='sheet_delete', flow_data={}
)
except taskflow.FlowSubmitException as ex:
delete_success = False
@@ -2203,17 +2173,14 @@ def post(self, request, **kwargs):
status_type='OK',
)
tl_event.add_object(
- obj=sv,
- label='isatab',
- name=sv.get_full_name(),
+ obj=sv, label='isatab', name=sv.get_full_name()
)
context['sheet_versions'].delete()
messages.success(
request,
'Deleted {} sample sheet version{}.'.format(
- version_count,
- 's' if version_count != 1 else '',
+ version_count, 's' if version_count != 1 else ''
),
)
return redirect(
@@ -2380,6 +2347,76 @@ def delete(self, request, *args, **kwargs):
return super().delete(request, *args, **kwargs)
+class IrodsDataRequestListView(
+ LoginRequiredMixin,
+ LoggedInPermissionMixin,
+ ProjectPermissionMixin,
+ InvestigationContextMixin,
+ ListView,
+):
+ """View for listing iRODS data requests"""
+
+ model = IrodsDataRequest
+ permission_required = 'samplesheets.edit_sheet'
+ template_name = 'samplesheets/irods_requests.html'
+ paginate_by = settings.SHEETS_IRODS_REQUEST_PAGINATION
+
+ @classmethod
+ def get_item_extra_data(cls, irods_session, item):
+ if settings.IRODS_WEBDAV_ENABLED:
+ item.webdav_url = urljoin(settings.IRODS_WEBDAV_URL, item.path)
+ else:
+ item.webdav_url = None
+ item.is_collection = irods_session.collections.exists(item.path)
+
+ def get_context_data(self, *args, **kwargs):
+ context_data = super().get_context_data(*args, **kwargs)
+ irods = get_backend_api('omics_irods')
+ with irods.get_session() as irods_session:
+ if settings.IRODS_WEBDAV_ENABLED:
+ for item in context_data['object_list']:
+ self.get_item_extra_data(irods_session, item)
+ assign = RoleAssignment.objects.filter(
+ project=self.get_project(),
+ user=self.request.user,
+ role__name=SODAR_CONSTANTS['PROJECT_ROLE_CONTRIBUTOR'],
+ )
+ context_data['is_contributor'] = bool(assign)
+ context_data['irods_webdav_enabled'] = settings.IRODS_WEBDAV_ENABLED
+ return context_data
+
+ def get_queryset(self):
+ project = self.get_project()
+ queryset = self.model.objects.filter(project=project)
+ # For superusers, owners and delegates,
+ # display active/failed requests from all users
+ if (
+ self.request.user.is_superuser
+ or project.is_delegate(self.request.user)
+ or project.is_owner(self.request.user)
+ ):
+ return queryset.filter(
+ status__in=[
+ IRODS_REQUEST_STATUS_ACTIVE,
+ IRODS_REQUEST_STATUS_FAILED,
+ ]
+ )
+ # For regular users, display their own requests regardless of status
+ return queryset.filter(user=self.request.user)
+
+ def get(self, request, *args, **kwargs):
+ irods_backend = get_backend_api('omics_irods')
+ if not irods_backend:
+ messages.error(request, 'iRODS backend not enabled')
+ return redirect(
+ reverse(
+ 'samplesheets:project_sheets',
+ kwargs={'project': self.get_project().sodar_uuid},
+ )
+ )
+ return super().get(request, *args, **kwargs)
+
+
class IrodsDataRequestCreateView(
LoginRequiredMixin,
LoggedInPermissionMixin,
@@ -2404,11 +2441,12 @@ def form_valid(self, form):
project = self.get_project()
# Create database object
obj = form.save(commit=False)
+ # TODO: These should happen in the form instead (see #1865)
obj.user = self.request.user
obj.project = project
obj.save()
# Create timeline event
- self.add_tl_create(obj)
+ self.add_tl_event(obj, 'create')
# Add app alerts to owners/delegates
self.add_alerts_create(project)
messages.success(
@@ -2455,11 +2493,8 @@ def get_form_kwargs(self):
return kwargs
def form_valid(self, form):
- obj = form.save(commit=False)
- obj.user = self.request.user
- obj.project = self.get_project()
- obj.save()
- self.add_tl_update(obj)
+ obj = form.save()
+ self.add_tl_event(obj, 'update')
messages.success(
self.request,
'iRODS data request "{}" updated.'.format(obj.get_display_name()),
@@ -2498,7 +2533,7 @@ def has_permission(self):
def get_success_url(self):
# Add timeline event
- self.add_tl_delete(self.object)
+ self.add_tl_event(self.object, 'delete')
# Handle project alerts
self.handle_alerts_deactivate(self.object)
messages.success(self.request, 'iRODS data request deleted.')
@@ -2598,12 +2633,7 @@ def post(self, request, *args, **kwargs):
),
)
except Exception as ex:
- messages.error(
- self.request,
- 'Accepting iRODS data request "{}" failed: {}'.format(
- obj.get_display_name(), ex
- ),
- )
+ self.add_error_message(obj, ex)
return redirect(
reverse(
'samplesheets:irods_requests',
@@ -2697,12 +2727,7 @@ def post(self, request, *args, **kwargs):
),
)
except Exception as ex:
- messages.error(
- self.request,
- 'Accepting iRODS data request "{}" failed: {}'.format(
- obj.get_display_name(), ex
- ),
- )
+ self.add_error_message(obj, ex)
return redirect(
reverse(
@@ -2801,10 +2826,7 @@ def post(self, request, *args, **kwargs):
try:
batch = self.get_irods_request_objects()
if not batch:
- messages.error(
- self.request,
- NO_REQUEST_MSG,
- )
+ messages.error(self.request, NO_REQUEST_MSG)
return redirect(
reverse(
'samplesheets:irods_requests',
@@ -2850,80 +2872,6 @@ def post(self, request, *args, **kwargs):
)
-class IrodsDataRequestListView(
- LoginRequiredMixin,
- LoggedInPermissionMixin,
- ProjectPermissionMixin,
- InvestigationContextMixin,
- ListView,
-):
- """View for listing iRODS data requests"""
-
- model = IrodsDataRequest
- permission_required = 'samplesheets.edit_sheet'
- template_name = 'samplesheets/irods_requests.html'
- paginate_by = settings.SHEETS_IRODS_REQUEST_PAGINATION
-
- def get_context_data(self, *args, **kwargs):
- context_data = super().get_context_data(*args, **kwargs)
- irods = get_backend_api('omics_irods')
- with irods.get_session() as irods_session:
- if settings.IRODS_WEBDAV_ENABLED:
- for item in context_data['object_list']:
- self.get_extra_item_data(irods_session, item)
- assign = RoleAssignment.objects.filter(
- project=self.get_project(),
- user=self.request.user,
- role__name=SODAR_CONSTANTS['PROJECT_ROLE_CONTRIBUTOR'],
- )
- context_data['is_contributor'] = bool(assign)
- context_data['irods_webdav_enabled'] = settings.IRODS_WEBDAV_ENABLED
- return context_data
-
- def get_queryset(self):
- project = self.get_project()
- queryset = self.model.objects.filter(project=project)
- # For superusers, owners and delegates,
- # display active/failed requests from all users
- if (
- self.request.user.is_superuser
- or project.is_delegate(self.request.user)
- or project.is_owner(self.request.user)
- ):
- return queryset.filter(
- status__in=[
- IRODS_REQUEST_STATUS_ACTIVE,
- IRODS_REQUEST_STATUS_FAILED,
- ]
- )
- # For regular users, dispaly their own requests regardless of status
- return queryset.filter(user=self.request.user)
-
- def build_webdav_url(self, item):
- return '{}/{}'.format(settings.IRODS_WEBDAV_URL, item.path)
-
- def get_extra_item_data(self, irods_session, item):
- # Add webdav URL to the item
- if settings.IRODS_WEBDAV_ENABLED:
- item.webdav_url = self.build_webdav_url(item)
- else:
- item.webdav_url = None
- # Check if the item is a collection
- item.is_collection = irods_session.collections.exists(item.path)
-
- def get(self, request, *args, **kwargs):
- irods_backend = get_backend_api('omics_irods')
- if not irods_backend:
- messages.error(request, 'iRODS backend not enabled')
- return redirect(
- reverse(
- 'samplesheets:project_sheets',
- kwargs={'project': self.get_project().sodar_uuid},
- )
- )
- return super().get(request, *args, **kwargs)
-
-
class SheetRemoteSyncView(
LoginRequiredMixin,
LoggedInPermissionMixin,
diff --git a/samplesheets/views_ajax.py b/samplesheets/views_ajax.py
index 19b2b9964..cab0f0a70 100644
--- a/samplesheets/views_ajax.py
+++ b/samplesheets/views_ajax.py
@@ -544,9 +544,7 @@ def _get_display_config(self, investigation, user, sheet_config=None):
'using default..'.format(user.username)
)
display_config = app_settings.get(
- APP_NAME,
- 'display_config_default',
- project=project,
+ APP_NAME, 'display_config_default', project=project
)
# If default display configuration is not found, build it
@@ -999,10 +997,11 @@ def _get_name(cls, node):
"""
if node['cells'][0]['obj_cls'] == 'Process':
for cell in node['cells']:
- if cell['header_type'] == 'process_name':
+ if cell.get('header_type') == 'process_name':
return cell['value']
else: # Material
return node['cells'][0]['value']
+ return None
@classmethod
def _add_node_attr(cls, node_obj, cell):
@@ -1191,8 +1190,15 @@ def _insert_row(self, row):
)
)
+ # TODO: We create duplicate rows with named processes, fix!
+
# Check if we'll need to consider collapsing of unnamed nodes
- if len([n for n in row['nodes'] if not self._get_name(n)]) > 0:
+ # (References to existing nodes with UUID will not have to be collapsed)
+ if [
+ n
+ for n in row['nodes']
+ if not n['cells'][0].get('uuid') and not self._get_name(n)
+ ]:
logger.debug('Unnamed node(s) in row, will attempt collapsing')
collapse = True
try:
@@ -1390,6 +1396,8 @@ def post(self, request, *args, **kwargs):
except Exception as ex:
return Response({'detail': str(ex)}, status=500)
except Exception as ex:
+ if settings.DEBUG:
+ raise (ex)
return Response({'detail': str(ex)}, status=500)
return Response(self.ok_data, status=200)
@@ -1869,11 +1877,11 @@ def post(self, request, *args, **kwargs):
path=path,
user=request.user,
project=project,
- description='Request created via Ajax API',
+ description='Request created in iRODS file list',
)
# Create timeline event
- self.add_tl_create(irods_request)
+ self.add_tl_event(irods_request, 'create')
# Add app alerts to owners/delegates
self.add_alerts_create(project)
return Response(
@@ -1911,16 +1919,12 @@ def post(self, request, *args, **kwargs):
)
# Add timeline event
- self.add_tl_delete(irods_request)
+ self.add_tl_event(irods_request, 'delete')
# Handle project alerts
self.handle_alerts_deactivate(irods_request)
irods_request.delete()
return Response(
- {
- 'detail': 'ok',
- 'status': None,
- 'user': None,
- },
+ {'detail': 'ok', 'status': None, 'user': None},
status=200,
)
@@ -1942,18 +1946,18 @@ def get(self, request, *args, **kwargs):
# Get files
try:
with irods_backend.get_session() as irods:
- ret_data = irods_backend.get_objects(irods, self.path)
+ obj_list = irods_backend.get_objects(irods, self.path)
except Exception as ex:
return Response({'detail': str(ex)}, status=400)
- for data_obj in ret_data.get('irods_data', []):
- obj = IrodsDataRequest.objects.filter(
- path=data_obj['path'], status__in=['ACTIVE', 'FAILED']
+ for o in obj_list:
+ db_obj = IrodsDataRequest.objects.filter(
+ path=o['path'], status__in=['ACTIVE', 'FAILED']
).first()
- data_obj['irods_request_status'] = obj.status if obj else None
- data_obj['irods_request_user'] = (
- str(obj.user.sodar_uuid) if obj else None
+ o['irods_request_status'] = db_obj.status if db_obj else None
+ o['irods_request_user'] = (
+ str(db_obj.user.sodar_uuid) if db_obj else None
)
- return Response(ret_data, status=200)
+ return Response({'irods_data': obj_list}, status=200)
class SheetVersionCompareAjaxView(SODARBaseProjectAjaxView):
diff --git a/samplesheets/views_api.py b/samplesheets/views_api.py
index 88498c8d8..7b50f8efb 100644
--- a/samplesheets/views_api.py
+++ b/samplesheets/views_api.py
@@ -185,14 +185,12 @@ def get(self, request, *args, **kwargs):
).first()
if not investigation:
raise NotFound()
-
export_format = 'json'
if self.request.get_full_path() == reverse(
'samplesheets:api_export_zip',
kwargs={'project': project.sodar_uuid},
):
export_format = 'zip'
-
try:
return self.get_isa_export(project, request, export_format)
except Exception as ex:
@@ -588,7 +586,7 @@ class IrodsDataRequestCreateAPIView(
def perform_create(self, serializer):
serializer.save()
# Create timeline event
- self.add_tl_create(serializer.instance)
+ self.add_tl_event(serializer.instance, 'create')
# Add app alerts to owners/delegates
self.add_alerts_create(serializer.instance.project)
@@ -620,7 +618,7 @@ def perform_update(self, serializer):
raise PermissionDenied
serializer.save()
# Add timeline event
- self.add_tl_update(serializer.instance)
+ self.add_tl_event(serializer.instance, 'update')
class IrodsDataRequestDestroyAPIView(
@@ -647,7 +645,7 @@ def perform_destroy(self, instance):
raise PermissionDenied
instance.delete()
# Add timeline event
- self.add_tl_delete(instance)
+ self.add_tl_event(instance, 'delete')
# Handle project alerts
self.handle_alerts_deactivate(instance)
@@ -838,13 +836,13 @@ def get(self, request, *args, **kwargs):
path = irods_backend.get_sample_path(project)
try:
with irods_backend.get_session() as irods:
- irods_data = irods_backend.get_objects(irods, path)
+ obj_list = irods_backend.get_objects(irods, path)
except Exception as ex:
return Response(
{'detail': '{}: {}'.format(IRODS_QUERY_ERROR_MSG, ex)},
status=status.HTTP_404_NOT_FOUND,
)
- return Response(irods_data, status=status.HTTP_200_OK)
+ return Response({'irods_data': obj_list}, status=status.HTTP_200_OK)
# TODO: Temporary HACK, should be replaced by proper API view
diff --git a/samplesheets/vueapp/package-lock.json b/samplesheets/vueapp/package-lock.json
index ee92aecdf..2d26489cc 100644
--- a/samplesheets/vueapp/package-lock.json
+++ b/samplesheets/vueapp/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "samplesheets",
- "version": "0.14.1",
+ "version": "0.14.2",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "samplesheets",
- "version": "0.14.1",
+ "version": "0.14.2",
"dependencies": {
"bootstrap-vue": "^2.22.0",
"core-js": "^3.23.5",
@@ -8125,9 +8125,9 @@
"dev": true
},
"node_modules/follow-redirects": {
- "version": "1.15.3",
- "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz",
- "integrity": "sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==",
+ "version": "1.15.5",
+ "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz",
+ "integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw==",
"dev": true,
"funding": [
{
@@ -23401,9 +23401,9 @@
"dev": true
},
"follow-redirects": {
- "version": "1.15.3",
- "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz",
- "integrity": "sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==",
+ "version": "1.15.5",
+ "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz",
+ "integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw==",
"dev": true
},
"for-each": {
diff --git a/samplesheets/vueapp/package.json b/samplesheets/vueapp/package.json
index 70d4b4558..c3f264f6e 100644
--- a/samplesheets/vueapp/package.json
+++ b/samplesheets/vueapp/package.json
@@ -1,6 +1,6 @@
{
"name": "samplesheets",
- "version": "0.14.1",
+ "version": "0.14.2",
"private": true,
"scripts": {
"serve": "vue-cli-service serve",
diff --git a/samplesheets/vueapp/src/App.vue b/samplesheets/vueapp/src/App.vue
index ce49b0a2b..497698d51 100644
--- a/samplesheets/vueapp/src/App.vue
+++ b/samplesheets/vueapp/src/App.vue
@@ -759,19 +759,18 @@ export default {
let enableNextIdx = null
// If the next node is a material, enable editing its name
- // Else if it's a process, enable editing for all cells (if available)
if (nextNodeCls === 'GenericMaterial') {
const itemType = cols[startIdx].colDef.cellEditorParams.headerInfo.item_type
const headerType = cols[startIdx].colDef.cellEditorParams.headerInfo.header_type
let value = this.getDefaultValue(nextColId, gridOptions, true)
// If default name suffix is set, fill name, enable node and continue
+ // TODO: Remove repetition with named process (see below)
if (headerType === 'name' &&
!(['SOURCE', 'DATA'].includes(itemType)) &&
value.value) {
value.value = this.getNamePrefix(rowNode, cols, startIdx) + value.value
let createNew = true
-
// Check if name already appears in column, update UUID if found
gridOptions.api.forEachNode(function (r) {
if (r.data[nextColId].value === value.value) {
@@ -779,7 +778,6 @@ export default {
value.uuid = r.data[nextColId].uuid
}
})
-
value.newInit = false
value.editable = true
rowNode.setDataValue(nextColId, value)
@@ -810,7 +808,7 @@ export default {
rowNode.setDataValue(cols[i].colId, value)
}
}
- } else if (nextNodeCls === 'Process') {
+ } else if (nextNodeCls === 'Process') { // Handle process node enabling
let i = startIdx
let processActive = false
let newInit = true
@@ -821,9 +819,10 @@ export default {
nextColId = cols[i].colId
const value = this.getDefaultValue(nextColId, gridOptions, newInit, forceEmpty)
const headerType = cols[i].colDef.cellEditorParams.headerInfo.header_type
+ const fieldEditable = cols[i].colDef.cellRendererParams.fieldEditable
if (headerType === 'protocol') {
- value.editable = true
+ value.editable = fieldEditable
if ('uuid_ref' in value && value.uuid_ref) {
processActive = true
newInit = false
@@ -831,24 +830,41 @@ export default {
} else forceEmpty = true
} else if (headerType === 'process_name') {
// Fill process name with default suffix if set
+ // TODO: Remove repetition with material name
const namePrefix = this.getNamePrefix(rowNode, cols, i)
if (namePrefix && value.value) {
value.value = namePrefix + value.value
+ let createNew = true
+ // Check if name already appears in column, update UUID if found
+ gridOptions.api.forEachNode(function (r) {
+ if (r.data[nextColId].value === value.value) {
+ createNew = false
+ value.uuid = r.data[nextColId].uuid
+ }
+ })
newInit = false
value.newInit = false
processActive = true
+ rowNode.setDataValue(nextColId, value)
+ this.handleNodeUpdate(
+ value,
+ cols[i],
+ rowNode,
+ gridOptions,
+ gridUuid,
+ createNew)
+ return
} else value.value = '' // HACK: Reset default value if not filled
value.editable = true // Process name should always be editable
} else {
// Only allow editing the rest of the cells if protocol is set
if (processActive) {
- value.editable = cols[i].colDef.cellRendererParams.fieldEditable
+ value.editable = fieldEditable
} else value.editable = false
}
rowNode.setDataValue(nextColId, value)
i += 1
}
-
// If default protocol or name was filled, enable the next node(s) too
if (processActive) enableNextIdx = i
}
@@ -860,14 +876,13 @@ export default {
},
handleNodeUpdate (
- firstCellValue, column, rowNode, gridOptions, gridUuid, createNew
+ nameCellValue, column, rowNode, gridOptions, gridUuid, createNew
) {
// console.log('handleNodeUpdate() called; colId=' + column.colId) // DEBUG
const gridApi = gridOptions.api
const columnApi = gridOptions.columnApi
- const firstColId = column.colId // ID of the identifying node column
+ const nameColId = column.colId // ID of the identifying node column
let assayMode = false
-
if (gridUuid in this.sodarContext.studies[this.currentStudyUuid].assays) {
assayMode = true
}
@@ -879,23 +894,22 @@ export default {
const studyCols = studyOptions.columnApi.getColumns()
let studyCopyRow = null
const sampleColId = this.sampleColId
-
// Get sample row from study table
studyApi.forEachNode(function (rowNode) {
if (!studyCopyRow &&
- rowNode.data[sampleColId].uuid === firstCellValue.uuid_ref) {
+ rowNode.data[sampleColId].uuid === nameCellValue.uuid_ref) {
studyCopyRow = rowNode
}
})
-
- // Fill in preceeding nodes
+        // Fill in preceding nodes for sample
for (let i = 1; i < this.sampleIdx; i++) {
// TODO: Create a generic helper for copying
const copyColId = studyCols[i].colId
const copyData = Object.assign(studyCopyRow.data[copyColId])
copyData.newRow = true
copyData.newInit = false
- copyData.editable = columnApi.getColumn(copyColId).colDef.cellRendererParams.fieldEditable
+ copyData.editable = columnApi.getColumn(
+ copyColId).colDef.cellRendererParams.fieldEditable
rowNode.setDataValue(copyColId, copyData)
}
}
@@ -909,55 +923,64 @@ export default {
let groupId = parent.groupId
if (groupId === '1' && this.sourceColSpan > 1) groupId = '2'
let startIdx
-
for (let i = 1; i < cols.length - 1; i++) {
- if (cols[i].colId === firstColId) {
+ if (cols[i].colId === nameColId) {
startIdx = i + 1
break
}
}
-
for (let i = startIdx; i < cols.length - 1; i++) {
const col = cols[i]
// NOTE: Must use originalParent to work with hidden columns
if (col.originalParent.groupId === groupId) {
nodeCols.push(col)
- } else if (col.colId !== firstColId &&
+ } else if (col.colId !== nameColId &&
col.originalParent.groupId !== groupId) {
nextNodeStartIdx = i
break
}
}
- // IF node is new THEN fill out other cells with default/empty values
+ // If the node is new, fill out other cells with default/empty values
if (createNew) {
for (let i = 0; i < nodeCols.length; i++) {
const newColId = nodeCols[i].colId
const value = this.getDefaultValue(nodeCols[i].colId, gridOptions)
rowNode.setDataValue(newColId, value)
}
- } else { // ELSE set UUIDs and update cell values (only in the same table)
+ } else { // Else set UUIDs and update cell values (only in the same table)
let copyRowNode = null
gridApi.forEachNode(function (rowNode) {
- if (!copyRowNode && rowNode.data[firstColId].value === firstCellValue.value) {
+ if (!copyRowNode &&
+ rowNode.data[nameColId].value === nameCellValue.value) {
copyRowNode = rowNode
}
})
-
for (let i = 0; i < nodeCols.length; i++) {
const copyColId = nodeCols[i].colId
const copyData = Object.assign(copyRowNode.data[copyColId])
copyData.newInit = false
- copyData.editable = columnApi.getColumn(copyColId).colDef.cellRendererParams.fieldEditable
+ copyData.editable = columnApi.getColumn(
+ copyColId).colDef.cellRendererParams.fieldEditable
rowNode.setDataValue(copyColId, copyData)
}
}
+      // When updating protocol name column, update preceding protocol UUID
+ if (startIdx > 2) {
+ const headerType = column.colDef.cellEditorParams.headerInfo.header_type
+ const prevCol = cols[startIdx - 2]
+ const prevHeaderType = prevCol.colDef.cellEditorParams.headerInfo.header_type
+ if (headerType === 'process_name' &&
+ prevHeaderType === 'protocol' &&
+ cols[startIdx - 2].originalParent.groupId === groupId) {
+ rowNode.data[prevCol.colId].uuid = rowNode.data[nameColId].uuid
+ }
+ }
// Enable the next node(s), if we are initializing node for the 1st time
if (nextNodeStartIdx) {
this.enableNextNodes(rowNode, gridOptions, gridUuid, nextNodeStartIdx)
}
-
// Redraw row node for all changes to be displayed
gridApi.redrawRows({ rows: [rowNode] })
},
@@ -1033,10 +1056,14 @@ export default {
const lastSourceGroupId = cols[lastSourceGroupIdx].originalParent.groupId
let groupId = cols[1].originalParent.groupId
- // Modify starting index and group id for assay table updating
- if (assayMode) {
+ if (assayMode) { // Assay table modifications
+ // Modify starting index and group id for assay table
startIdx = this.sampleIdx
groupId = cols[startIdx].originalParent.groupId
+ // Update newRow for source cells
+ for (let i = 1; i < startIdx; i++) {
+ rowNode.data[cols[i].colId].newRow = false
+ }
}
// Set cell data to match an existing row
@@ -1047,13 +1074,11 @@ export default {
value.newInit = false
value.editable = cols[i].colDef.cellRendererParams.fieldEditable
rowNode.setDataValue(cols[i].colId, value)
-
// Save sample info if new sample was added in study
if (!assayMode && cols[i].colId === this.sampleColId) {
sampleUuid = data.node_uuids[nodeIdx]
sampleName = rowNode.data[cols[i].colId].value
}
-
if (i < cols.length - 2 &&
groupId !== cols[i + 1].originalParent.groupId &&
cols[i + 1].originalParent.groupId !== lastSourceGroupId) {
@@ -1091,7 +1116,6 @@ export default {
console.log('Row insert status: ' + data.detail) // DEBUG
this.showNotification('Insert Failed', 'danger', 1000)
}
-
finishCallback()
this.updatingRow = false
}
diff --git a/samplesheets/vueapp/src/components/editors/DataCellEditor.vue b/samplesheets/vueapp/src/components/editors/DataCellEditor.vue
index 63afc5d08..500323ebc 100644
--- a/samplesheets/vueapp/src/components/editors/DataCellEditor.vue
+++ b/samplesheets/vueapp/src/components/editors/DataCellEditor.vue
@@ -88,6 +88,7 @@ export default Vue.extend({
nameValues: [],
nameUuids: {},
sampleColId: null,
+ nameColumn: false,
destroyCalled: false // HACK for issue #869
}
},
@@ -138,7 +139,7 @@ export default Vue.extend({
return ''
},
getValidState () {
- if (this.headerInfo.header_type === 'name') { // Name is a special case
+ if (this.nameColumn) { // Name is a special case
// TODO: Cleanup/simplify
// NOTE: Empty value is allowed for DATA materials
if ((this.editValue.length === 0 &&
@@ -233,7 +234,6 @@ export default Vue.extend({
created () {
this.app = this.params.app
this.gridOptions = this.app.getGridOptionsByUuid(this.params.gridUuid)
-
// Cancel editing if editingCell is true
if (this.app.editingCell) {
this.editAllowed = false
@@ -248,18 +248,18 @@ export default Vue.extend({
this.isValueArray = true
this.editValue = this.editValue.join('; ')
}
-
if (this.value.value) {
this.ogEditValue = Object.assign(this.value.value)
} else {
this.ogEditValue = null
}
-
this.headerInfo = this.params.headerInfo
this.renderInfo = this.params.renderInfo
this.editConfig = this.params.editConfig
this.sampleColId = this.params.sampleColId
-
+ if (['name', 'process_name'].includes(this.headerInfo.header_type)) {
+ this.nameColumn = true
+ }
// console.log('Edit colId/field: ' + this.params.colDef.field) // DEBUG
// Set up unit value
@@ -322,7 +322,7 @@ export default Vue.extend({
}
// Special setup for the name column
- if (this.headerInfo.header_type === 'name') {
+ if (this.nameColumn) {
if (this.value.newRow) this.containerTitle = 'Enter name of new or existing node'
else this.containerTitle = 'Rename node'
// If name, get other current values for comparison in validation
@@ -360,20 +360,6 @@ export default Vue.extend({
this.value.value[i] = this.value.value[i].trim()
}
}
- // Check if we're in a named process without a protocol
- let namedProcess = false
- if (this.headerInfo.header_type === 'process_name') {
- namedProcess = true
- const groupId = this.params.column.originalParent.groupId
- const cols = this.gridOptions.columnApi.getColumns()
- for (let i = 1; i < cols.length - 1; i++) {
- if (cols[i].originalParent.groupId === groupId &&
- cols[i].colDef.cellEditorParams.headerInfo.header_type === 'protocol') {
- namedProcess = false
- break
- }
- }
- }
// Reject invalid value
if (!this.valid) {
@@ -385,7 +371,7 @@ export default Vue.extend({
}
// Confirm renaming node into an existing node and overwriting values
- if ((this.headerInfo.header_type === 'name' || namedProcess) &&
+ if ((this.nameColumn) &&
this.value.newRow &&
this.ogEditValue &&
this.nameValues.includes(this.editValue)) {
@@ -402,12 +388,11 @@ export default Vue.extend({
}
// Proceed with setting values and saving
- if ((this.headerInfo.header_type === 'name' || namedProcess) &&
- (!this.value.uuid || this.value.newRow)) {
+ if (this.nameColumn && (!this.value.uuid || this.value.newRow)) {
// Set UUID if we are referring to an existing node (only if material)
- if (!namedProcess && this.nameValues.includes(this.value.value)) {
+ if (this.nameValues.includes(this.value.value)) {
this.value.uuid = this.nameUuids[this.value.value]
- } else if (!namedProcess) {
+ } else {
// Clear UUID in case user switched from existing to new while editing
this.value.uuid = null
}
@@ -419,6 +404,7 @@ export default Vue.extend({
else this.value.unit = this.editUnit
// Handle updating/initiating node
+ // TODO: Ensure this works also if setting process name value to empty
this.app.handleNodeUpdate(
this.value,
this.params.column,
@@ -434,7 +420,6 @@ export default Vue.extend({
// Update cell (only if we already have the UUID!)
if (this.value.uuid) {
this.app.handleCellEdit(this.getUpdateData(), true)
-
// If a sample has been renamed, update sample list for assay
if (this.headerInfo.header_type === 'name' &&
this.params.colDef.field === this.sampleColId) {
diff --git a/samplesheets/vueapp/src/components/renderers/RowEditRenderer.vue b/samplesheets/vueapp/src/components/renderers/RowEditRenderer.vue
index 62062a91d..2afe09081 100644
--- a/samplesheets/vueapp/src/components/renderers/RowEditRenderer.vue
+++ b/samplesheets/vueapp/src/components/renderers/RowEditRenderer.vue
@@ -313,23 +313,17 @@ export default Vue.extend({
let groupId = cols[startIdx].originalParent.groupId
let fieldIdx = 0 // Field index within node
let nodeCells = []
- let nodeClass
+ // let nodeClass
let nodeStartIdx = startIdx
for (let i = startIdx; i < cols.length - 1; i++) {
- if (fieldIdx === 0) {
- nodeStartIdx = i
- nodeClass = cols[i].colDef.cellEditorParams.headerInfo.obj_cls
- }
-
+ if (fieldIdx === 0) nodeStartIdx = i
// Add cells for new nodes, only the first node for existing ones
- if (nodeClass === 'Process' ||
- fieldIdx === 0 ||
+ if (fieldIdx === 0 ||
(!this.rowNode.data[cols[i].colId].uuid &&
!(this.assayMode && inSampleNode))) {
nodeCells.push(this.getCellData(cols[i]))
}
-
if (i === cols.length - 1 ||
groupId !== cols[i + 1].originalParent.groupId) {
groupId = cols[i + 1].originalParent.groupId
diff --git a/samplesheets/vueapp/tests/unit/DataCellEditor.spec.js b/samplesheets/vueapp/tests/unit/DataCellEditor.spec.js
index 3a7902bd3..f658190b1 100644
--- a/samplesheets/vueapp/tests/unit/DataCellEditor.spec.js
+++ b/samplesheets/vueapp/tests/unit/DataCellEditor.spec.js
@@ -1,6 +1,7 @@
import { createLocalVue, mount } from '@vue/test-utils'
import {
studyUuid,
+ assayUuid,
copy,
getAppStub,
getSheetTableComponents,
@@ -819,6 +820,53 @@ describe('DataCellEditor.vue', () => {
expect(wrapper.find('.sodar-ss-data').text()).toBe('-')
})
+ it('updates value in process assay name column', async () => {
+ const value = '0815-N1-DNA1-WES1-UPDATED'
+ const table = copy(studyTablesOneCol).tables.assays[assayUuid]
+ table.field_header[0] = studyTablesEdit.tables.assays[assayUuid].field_header[13]
+ table.table_data[0][0] = copy(studyTablesEdit).tables.assays[assayUuid].table_data[0][13]
+ const wrapper = mountSheetTable({
+ table: table,
+ editStudyConfig: {
+ nodes: [{ fields: [studyEditConfig.assays[assayUuid].nodes[2].fields[1]] }]
+ }
+ })
+ await waitAG(wrapper)
+ await waitRAF()
+
+ await wrapper.find('.sodar-ss-data-cell').trigger('dblclick')
+ const input = wrapper.find('.ag-cell-edit-input')
+ input.element.value = value
+ await input.trigger('input')
+ await gridOptions.api.stopEditing()
+ expect(wrapper.find('.ag-cell-edit-input').exists()).toBe(false)
+ expect(wrapper.find('.sodar-ss-data').text()).toBe(value)
+ })
+
+ it('updates process assay name column with empty value', async () => {
+ const value = '' // NOTE: For named processes this IS allowed
+ const table = copy(studyTablesOneCol).tables.assays[assayUuid]
+ table.field_header[0] = studyTablesEdit.tables.assays[assayUuid].field_header[13]
+ table.table_data[0][0] = copy(studyTablesEdit).tables.assays[assayUuid].table_data[0][13]
+ const wrapper = mountSheetTable({
+ table: table,
+ editStudyConfig: {
+ nodes: [{ fields: [studyEditConfig.assays[assayUuid].nodes[2].fields[1]] }]
+ }
+ })
+ await waitAG(wrapper)
+ await waitRAF()
+
+ await wrapper.find('.sodar-ss-data-cell').trigger('dblclick')
+ const input = wrapper.find('.ag-cell-edit-input')
+ input.element.value = value
+ await input.trigger('input')
+ await gridOptions.api.stopEditing()
+ expect(wrapper.find('.ag-cell-edit-input').exists()).toBe(false)
+ expect(wrapper.find('.sodar-ss-data').text()).toBe('-') // Empty value
+ })
+
+ // TODO: Test renaming to assay name already existing in table for new/old row
// TODO: Test update propagation for multiple rows
// TODO: Test value updating for new rows
})
diff --git a/taskflowbackend/api.py b/taskflowbackend/api.py
index 6585cf2c4..60a47d36f 100644
--- a/taskflowbackend/api.py
+++ b/taskflowbackend/api.py
@@ -8,6 +8,7 @@
ZONE_STATUS_NOT_CREATED,
ZONE_STATUS_CREATING,
ZONE_STATUS_FAILED,
+ STATUS_FINISHED,
)
from landingzones.models import LandingZone
@@ -62,6 +63,29 @@ def _raise_flow_exception(cls, ex_msg, tl_event=None, zone=None):
# TODO: Create app alert for failure if async (see #1499)
raise cls.FlowSubmitException(ex_msg)
+ @classmethod
+ def _raise_lock_exception(cls, ex_msg, tl_event=None, zone=None):
+ """
+ Raise exception specifically for project lock errors. Updates the status
+ of the landing zone only if zone has not yet been finished by a
+ concurrent taskflow.
+
+ :param ex_msg: Exception message (string)
+ :param tl_event: Timeline event or None
+ :param zone: LandingZone object or None
+ :raise: FlowSubmitException
+ """
+ lock_zone = None
+ if zone:
+ zone.refresh_from_db()
+ if zone.status not in STATUS_FINISHED:
+ lock_zone = zone # Exclude zone if already finished (see #1909)
+ cls._raise_flow_exception(
+ LOCK_FAIL_MSG + ': ' + str(ex_msg),
+ tl_event,
+ lock_zone,
+ )
+
@classmethod
def get_flow(
cls,
@@ -125,7 +149,7 @@ def run_flow(
ex_msg = None
coordinator = None
lock = None
- # Get zone if present in flow
+ # Get landing zone if present in flow
zone = None
if flow.flow_data.get('zone_uuid'):
zone = LandingZone.objects.filter(
@@ -137,10 +161,8 @@ def run_flow(
# Acquire lock
coordinator = lock_api.get_coordinator()
if not coordinator:
- cls._raise_flow_exception(
- LOCK_FAIL_MSG + ': Failed to retrieve lock coordinator',
- tl_event,
- zone,
+ cls._raise_lock_exception(
+ 'Failed to retrieve lock coordinator', tl_event, zone
)
else:
lock_id = str(project.sodar_uuid)
@@ -148,11 +170,7 @@ def run_flow(
try:
lock_api.acquire(lock)
except Exception as ex:
- cls._raise_flow_exception(
- LOCK_FAIL_MSG + ': {}'.format(ex),
- tl_event,
- zone,
- )
+ cls._raise_lock_exception(str(ex), tl_event, zone)
else:
logger.info('Lock not required (flow.require_lock=False)')
@@ -185,9 +203,18 @@ def run_flow(
# Raise exception if failed, otherwise return result
if ex_msg:
- logger.error(ex_msg) # TODO: Isn't this redundant?
- # NOTE: Not providing zone here since it's handled by flow
- cls._raise_flow_exception(ex_msg, tl_event, None)
+ logger.error(ex_msg)
+ # Provide landing zone if error occurs but status has not been set
+ # (This means a failure has not been properly handled in the flow)
+ ex_zone = None
+ if zone:
+ zone.refresh_from_db()
+ if zone.status not in [
+ ZONE_STATUS_NOT_CREATED,
+ ZONE_STATUS_FAILED,
+ ]:
+ ex_zone = zone
+ cls._raise_flow_exception(ex_msg, tl_event, ex_zone)
return flow_result
def submit(
diff --git a/taskflowbackend/flows/landing_zone_create.py b/taskflowbackend/flows/landing_zone_create.py
index 94550cc30..b9a87948e 100644
--- a/taskflowbackend/flows/landing_zone_create.py
+++ b/taskflowbackend/flows/landing_zone_create.py
@@ -154,26 +154,30 @@ def build(self, force_fail=False):
)
)
# Create collections
- for d in self.flow_data['colls']:
- coll_path = os.path.join(zone_path, d)
+ if self.flow_data['colls']:
+ colls_full_path = [
+ os.path.join(zone_path, c) for c in self.flow_data['colls']
+ ]
+ coll_count = len(self.flow_data['colls'])
self.add_task(
- irods_tasks.CreateCollectionTask(
- name='Create collection {}'.format(coll_path),
+ irods_tasks.BatchCreateCollectionsTask(
+ name='Batch create {} collection{}'.format(
+ coll_count, 's' if coll_count != 1 else ''
+ ),
irods=self.irods,
- inject={'path': coll_path},
+ inject={'paths': colls_full_path},
)
)
# Enforce collection access if set
if self.flow_data['restrict_colls']:
self.add_task(
- irods_tasks.SetAccessTask(
- name='Set user owner access to collection {}'.format(
- coll_path
- ),
+ irods_tasks.BatchSetAccessTask(
+ name='Batch set user owner access to created '
+ 'collections',
irods=self.irods,
inject={
'access_name': 'own',
- 'path': coll_path,
+ 'paths': colls_full_path,
'user_name': zone.user.username,
'access_lookup': access_lookup,
'irods_backend': self.irods_backend,
diff --git a/taskflowbackend/flows/landing_zone_delete.py b/taskflowbackend/flows/landing_zone_delete.py
index 39fa04ed4..373e9ea69 100644
--- a/taskflowbackend/flows/landing_zone_delete.py
+++ b/taskflowbackend/flows/landing_zone_delete.py
@@ -54,7 +54,7 @@ def build(self, force_fail=False):
inject={'path': zone_path},
)
)
- # Set zone status to DELETING
+ # Set zone status to DELETED
self.add_task(
lz_tasks.SetLandingZoneStatusTask(
name='Set landing zone status to DELETED',
diff --git a/taskflowbackend/flows/landing_zone_move.py b/taskflowbackend/flows/landing_zone_move.py
index 217266d54..9e40d7715 100644
--- a/taskflowbackend/flows/landing_zone_move.py
+++ b/taskflowbackend/flows/landing_zone_move.py
@@ -12,15 +12,11 @@
from landingzones.models import LandingZone
from taskflowbackend.flows.base_flow import BaseLinearFlow
-from taskflowbackend.irods_utils import (
- get_subcoll_obj_paths,
- get_subcoll_paths,
-)
from taskflowbackend.tasks import irods_tasks
SAMPLE_COLL = settings.IRODS_SAMPLE_COLL
-ZONE_INFO_CALC = 'Finding and calculating missing checksums in iRODS'
+ZONE_INFO_CALC = 'Calculating missing checksums in iRODS'
ZONE_INFO_VALIDATE = 'Validating {count} file{plural}'
ZONE_INFO_READ_ONLY = ', write access disabled'
@@ -37,7 +33,6 @@ def validate(self):
return super().validate()
def build(self, force_fail=False):
- # Setup
validate_only = self.flow_data.get('validate_only', False)
zone = LandingZone.objects.get(sodar_uuid=self.flow_data['zone_uuid'])
project_group = self.irods_backend.get_user_group_name(self.project)
@@ -54,39 +49,32 @@ def build(self, force_fail=False):
),
)
- # Get landing zone file paths (without .md5 files) from iRODS
- zone_coll = self.irods.collections.get(zone_path)
- zone_objects = get_subcoll_obj_paths(zone_coll)
- zone_objects_nomd5 = list(
- set(
- [
- p
- for p in zone_objects
- if p[p.rfind('.') + 1 :].lower() != 'md5'
- ]
- )
- )
- zone_objects_md5 = list(
- set([p for p in zone_objects if p not in zone_objects_nomd5])
+ # Get landing zone data object and collection paths from iRODS
+ zone_all = self.irods_backend.get_objects(
+ self.irods, zone_path, include_md5=True, include_colls=True
)
+ zone_objects = [o['path'] for o in zone_all if o['type'] == 'obj']
+ zone_objects_nomd5 = [
+ p for p in zone_objects if p[p.rfind('.') + 1 :].lower() != 'md5'
+ ]
+ zone_objects_md5 = [
+ p for p in zone_objects if p not in zone_objects_nomd5
+ ]
file_count = len(zone_objects_nomd5)
file_count_msg_plural = 's' if file_count != 1 else ''
# Get all collections with root path
zone_all_colls = [zone_path]
- zone_all_colls += get_subcoll_paths(zone_coll)
+ zone_all_colls += [o['path'] for o in zone_all if o['type'] == 'coll']
# Get list of collections containing files (ignore empty colls)
zone_object_colls = list(set([p[: p.rfind('/')] for p in zone_objects]))
- # Convert these to collections inside sample collection
- sample_colls = list(
- set(
- [
- sample_path + '/' + '/'.join(p.split('/')[10:])
- for p in zone_object_colls
- if len(p.split('/')) > 10
- ]
- )
- )
+ # Convert paths to collections inside sample collection
+ zone_path_len = len(zone_path.split('/'))
+ sample_colls = [
+ sample_path + '/' + '/'.join(p.split('/')[zone_path_len:])
+ for p in zone_object_colls
+ if len(p.split('/')) > zone_path_len
+ ]
# print('sample_path: {}'.format(sample_path)) # DEBUG
# print('zone_objects: {}'.format(zone_objects)) # DEBUG
diff --git a/taskflowbackend/irods_utils.py b/taskflowbackend/irods_utils.py
index bf26b0819..58921af1c 100644
--- a/taskflowbackend/irods_utils.py
+++ b/taskflowbackend/irods_utils.py
@@ -1,28 +1,6 @@
"""iRODS utilities for the taskflowbackend app"""
-def get_subcoll_obj_paths(coll):
- """
- Return paths to all files within collection and its subcollections
- recursively.
- """
- ret = []
- for sub_coll in coll.subcollections:
- ret += get_subcoll_obj_paths(sub_coll)
- for data_obj in coll.data_objects:
- ret.append(data_obj.path)
- return ret
-
-
-def get_subcoll_paths(coll):
- """Return paths to all subcollections within collection recursively"""
- ret = []
- for sub_coll in coll.subcollections:
- ret.append(sub_coll.path)
- ret += get_subcoll_paths(sub_coll)
- return ret
-
-
def get_batch_role(project, user_name):
"""
Return role dict for use with e.g. the role_update_irods_batch flow.
diff --git a/taskflowbackend/plugins.py b/taskflowbackend/plugins.py
index ebf7e6717..db0e69f50 100644
--- a/taskflowbackend/plugins.py
+++ b/taskflowbackend/plugins.py
@@ -34,9 +34,14 @@
# Local constants
APP_NAME = 'taskflowbackend'
-TL_SUBMIT_DESC = 'Job submitted to Taskflow'
IRODS_CAT_SKIP_MSG = 'Categories are not synchronized into iRODS'
RANK_FINDER = ROLE_RANKING[PROJECT_ROLE_FINDER]
+TASKFLOW_INFO_SETTINGS = [
+ 'TASKFLOW_IRODS_CONN_TIMEOUT',
+ 'TASKFLOW_LOCK_RETRY_COUNT',
+ 'TASKFLOW_LOCK_RETRY_INTERVAL',
+]
+TL_SUBMIT_DESC = 'Job submitted to Taskflow'
class BackendPlugin(ProjectModifyPluginMixin, BackendPluginPoint):
@@ -54,6 +59,9 @@ class BackendPlugin(ProjectModifyPluginMixin, BackendPluginPoint):
#: Description string
description = 'SODAR Taskflow backend for iRODS data transactions'
+ #: Names of plugin specific Django settings to display in siteinfo
+ info_settings = TASKFLOW_INFO_SETTINGS
+
# Internal helpers ---------------------------------------------------------
@classmethod
diff --git a/taskflowbackend/tasks/irods_tasks.py b/taskflowbackend/tasks/irods_tasks.py
index 5f60bc204..ee9319279 100644
--- a/taskflowbackend/tasks/irods_tasks.py
+++ b/taskflowbackend/tasks/irods_tasks.py
@@ -26,6 +26,96 @@
MD5_RE = re.compile(r'([^\w.])')
+# Mixins -----------------------------------------------------------------------
+
+
+class IrodsAccessMixin:
+ """Mixin for iRODS access helpers"""
+
+ def execute_set_access(
+ self,
+ access_name,
+ path,
+ user_name,
+ access_lookup,
+ obj_target,
+ recursive,
+ ):
+ """
+ Set access for user in a single data object or collection.
+
+ :param access_name: Access level to set (string)
+ :param path: Full iRODS path to collection or data object (string)
+ :param user_name: Name of user or group (string)
+ :param access_lookup: Access level lookup for iRODS compatibility (dict)
+ :param obj_target: Whether target is a data object (boolean)
+ :param recursive: Set collection access recursively if True (boolean)
+ """
+ if not self.execute_data.get('access_names'):
+ self.execute_data['access_names'] = {}
+ if obj_target:
+ target = self.irods.data_objects.get(path)
+ recursive = False
+ else:
+ target = self.irods.collections.get(path)
+ recursive = recursive
+ target_access = self.irods.permissions.get(target=target)
+
+ user_access = next(
+ (x for x in target_access if x.user_name == user_name), None
+ )
+ modifying_data = False
+ if (
+ user_access
+ and user_access.access_name != access_lookup[access_name]
+ ):
+ self.execute_data['access_names'][path] = access_lookup[
+ user_access.access_name
+ ]
+ modifying_data = True
+ elif not user_access:
+ self.execute_data['access_names'][path] = 'null'
+ modifying_data = True
+
+ if modifying_data:
+ acl = iRODSAccess(
+ access_name=access_name,
+ path=path,
+ user_name=user_name,
+ user_zone=self.irods.zone,
+ )
+ self.irods.permissions.set(acl, recursive=recursive)
+ self.data_modified = True # Access was modified
+
+ def revert_set_access(
+ self,
+ path,
+ user_name,
+ obj_target,
+ recursive,
+ ):
+ """
+ Revert setting access for user in a single collection or data object.
+
+ :param path: Full iRODS path to collection or data object (string)
+ :param user_name: Name of user or group (string)
+ :param obj_target: Whether target is a data object (boolean)
+ :param recursive: Set collection access recursively if True (boolean)
+ """
+ if self.data_modified:
+ acl = iRODSAccess(
+ access_name=self.execute_data['access_names'][path],
+ path=path,
+ user_name=user_name,
+ user_zone=self.irods.zone,
+ )
+ recursive = False if obj_target else recursive
+ self.irods.permissions.set(acl, recursive=recursive)
+
+
+# Base Task --------------------------------------------------------------------
+
+
class IrodsBaseTask(BaseTask):
"""Base iRODS task"""
@@ -51,6 +141,9 @@ def _raise_irods_exception(self, ex, info=None):
raise Exception(desc)
+# Tasks ------------------------------------------------------------------------
+
+
class CreateCollectionTask(IrodsBaseTask):
"""
Create collection and its parent collections if they doesn't exist (imkdir)
@@ -274,7 +367,7 @@ def revert(self, path, inherit=True, *args, **kwargs):
'''
-class SetAccessTask(IrodsBaseTask):
+class SetAccessTask(IrodsAccessMixin, IrodsBaseTask):
"""
Set user/group access to target (ichmod). If the target is a data object
(obj_target=True), the recursive argument will be ignored.
@@ -292,44 +385,17 @@ def execute(
*args,
**kwargs
):
- if obj_target:
- target = self.irods.data_objects.get(path)
- recursive = False
- else:
- target = self.irods.collections.get(path)
- recursive = recursive
-
- target_access = self.irods.permissions.get(target=target)
- user_access = next(
- (x for x in target_access if x.user_name == user_name), None
- )
- if (
- user_access
- and user_access.access_name != access_lookup[access_name]
- ):
- self.execute_data['access_name'] = access_lookup[
- user_access.access_name
- ]
- modifying_data = True
- elif not user_access:
- self.execute_data['access_name'] = 'null'
- modifying_data = True
- else:
- modifying_data = False
-
- if modifying_data:
- acl = iRODSAccess(
- access_name=access_name,
- path=path,
- user_name=user_name,
- user_zone=self.irods.zone,
+ try:
+ self.execute_set_access(
+ access_name,
+ path,
+ user_name,
+ access_lookup,
+ obj_target,
+ recursive,
)
- try:
- self.irods.permissions.set(acl, recursive=recursive)
- except Exception as ex:
- ex_info = user_name
- self._raise_irods_exception(ex, ex_info)
- self.data_modified = True
+ except Exception as ex:
+ self._raise_irods_exception(ex, user_name)
super().execute(*args, **kwargs)
def revert(
@@ -344,15 +410,10 @@ def revert(
*args,
**kwargs
):
- if self.data_modified:
- acl = iRODSAccess(
- access_name=self.execute_data['access_name'],
- path=path,
- user_name=user_name,
- user_zone=self.irods.zone,
- )
- recursive = False if obj_target else recursive
- self.irods.permissions.set(acl, recursive=recursive)
+ try:
+ self.revert_set_access(path, user_name, obj_target, recursive)
+ except Exception:
+ pass # TODO: Log revert() exceptions?
class IssueTicketTask(IrodsBaseTask):
@@ -495,7 +556,53 @@ def revert(self, src_path, dest_path, *args, **kwargs):
self.irods.data_objects.move(src_path=new_src, dest_path=new_dest)
-# Batch file tasks -------------------------------------------------------------
+# Batch Tasks ------------------------------------------------------------------
+
+
+class BatchSetAccessTask(IrodsAccessMixin, IrodsBaseTask):
+ """
+ Set user/group access to multiple targets (ichmod). If a target is a data
+ object (obj_target=True), the recursive argument will be ignored.
+ """
+
+ def execute(
+ self,
+ access_name,
+ paths,
+ user_name,
+ access_lookup,
+ irods_backend,
+ obj_target=False,
+ recursive=True,
+ *args,
+ **kwargs
+ ):
+ # NOTE: execute_set_access() does not catch exceptions; they propagate
+ for path in paths:
+ self.execute_set_access(
+ access_name,
+ path,
+ user_name,
+ access_lookup,
+ obj_target,
+ recursive,
+ )
+ super().execute(*args, **kwargs)
+
+ def revert(
+ self,
+ access_name,
+ paths,
+ user_name,
+ access_lookup,
+ irods_backend,
+ obj_target=False,
+ recursive=True,
+ *args,
+ **kwargs
+ ):
+ for path in paths:
+ self.revert_set_access(path, user_name, obj_target, recursive)
class BatchCheckFilesTask(IrodsBaseTask):
@@ -665,6 +772,7 @@ def execute(
'Error getting permissions of target "{}"'.format(target),
)
+ # TODO: Remove repetition, use IrodsAccessMixin
user_access = next(
(x for x in target_access if x.user_name == user_name), None
)
diff --git a/taskflowbackend/tests/base.py b/taskflowbackend/tests/base.py
index 16eee4505..2fa8baee6 100644
--- a/taskflowbackend/tests/base.py
+++ b/taskflowbackend/tests/base.py
@@ -192,19 +192,30 @@ def clear_irods_test_data(cls):
'TASKFLOW_TEST_MODE not True, cleanup command not allowed'
)
irods_backend = get_backend_api('omics_irods')
- projects_root = irods_backend.get_projects_path()
permanent_users = getattr(
settings, 'TASKFLOW_TEST_PERMANENT_USERS', DEFAULT_PERMANENT_USERS
)
# TODO: Remove stuff from user home collections
with irods_backend.get_session() as irods:
- # Remove project folders
+ # Remove SODAR project and root collections
+ rm_kwargs = {'recurse': True, 'force': True}
try:
- irods.collections.remove(
- projects_root, recurse=True, force=True
- )
- logger.debug('Removed projects root: {}'.format(projects_root))
+ if settings.IRODS_ROOT_PATH:
+ sodar_root = '/{}/{}'.format(
+ settings.IRODS_ZONE,
+ settings.IRODS_ROOT_PATH.split('/')[0],
+ )
+ irods.collections.remove(sodar_root, **rm_kwargs)
+ logger.debug(
+ 'Removed root collection: {}'.format(sodar_root)
+ )
+ else:
+ projects_root = irods_backend.get_projects_path()
+ irods.collections.remove(projects_root, **rm_kwargs)
+ logger.debug(
+ 'Removed projects root: {}'.format(projects_root)
+ )
except Exception:
pass # This is OK, the root just wasn't there
# Remove created user groups and users
@@ -236,9 +247,8 @@ def clear_irods_test_data(cls):
)
obj_paths = [
o['path']
- for o in irods_backend.get_objs_recursively(irods, trash_coll)
- + irods_backend.get_objs_recursively(
- irods, trash_coll, md5=True
+ for o in irods_backend.get_objs_recursively(
+ irods, trash_coll, include_md5=True
)
]
for path in obj_paths:
diff --git a/taskflowbackend/tests/test_flows.py b/taskflowbackend/tests/test_flows.py
index 85d57c210..d455ec73e 100644
--- a/taskflowbackend/tests/test_flows.py
+++ b/taskflowbackend/tests/test_flows.py
@@ -80,6 +80,8 @@
UPDATED_TITLE = 'NewTitle'
UPDATED_DESC = 'updated description'
SCRIPT_USER_NAME = 'script_user'
+IRODS_ROOT_PATH = 'sodar/root'
+INVALID_REDIS_URL = 'redis://127.0.0.1:6666/0'
class TaskflowbackendFlowTestBase(TaskflowViewTestBase):
@@ -453,7 +455,7 @@ def setUp(self):
self.make_irods_colls(self.investigation)
def test_delete(self):
- """Test landing_zone_delete with an empty landing zone"""
+ """Test landing_zone_delete with empty landing zone"""
zone = self.make_landing_zone(
title=ZONE_TITLE,
project=self.project,
@@ -560,6 +562,54 @@ def test_delete_files_restrict(self):
self.assertEqual(self.irods.data_objects.exists(obj_path), False)
self.assertEqual(self.irods.collections.exists(zone_path), False)
+ def test_delete_finished(self):
+ """Test landing_zone_delete with finished zone"""
+ # NOTE: This may happen with concurrent requests. See #1909, #1910
+ zone = self.make_landing_zone(
+ title=ZONE_TITLE,
+ project=self.project,
+ user=self.user,
+ assay=self.assay,
+ description=ZONE_DESC,
+ status=ZONE_STATUS_DELETED,
+ )
+ # Do not create in taskflow
+ self.assertEqual(zone.status, ZONE_STATUS_DELETED)
+ flow_data = {'zone_uuid': str(zone.sodar_uuid)}
+ flow = self.taskflow.get_flow(
+ irods_backend=self.irods_backend,
+ project=self.project,
+ flow_name='landing_zone_delete',
+ flow_data=flow_data,
+ )
+ self._build_and_run(flow)
+ zone.refresh_from_db()
+ # This should still be deleted, not failed
+ self.assertEqual(zone.status, ZONE_STATUS_DELETED)
+
+ @override_settings(REDIS_URL=INVALID_REDIS_URL)
+ def test_delete_finished_lock_failure(self):
+ """Test landing_zone_delete with finished zone and lock failure"""
+ zone = self.make_landing_zone(
+ title=ZONE_TITLE,
+ project=self.project,
+ user=self.user,
+ assay=self.assay,
+ description=ZONE_DESC,
+ status=ZONE_STATUS_DELETED,
+ )
+ self.assertEqual(zone.status, ZONE_STATUS_DELETED)
+ flow_data = {'zone_uuid': str(zone.sodar_uuid)}
+ flow = self.taskflow.get_flow(
+ irods_backend=self.irods_backend,
+ project=self.project,
+ flow_name='landing_zone_delete',
+ flow_data=flow_data,
+ )
+ self._build_and_run(flow)
+ zone.refresh_from_db()
+ self.assertEqual(zone.status, ZONE_STATUS_DELETED)
+
class TestLandingZoneMove(
LandingZoneMixin,
@@ -620,8 +670,7 @@ def test_move(self):
)
self.assertEqual(self.irods.data_objects.exists(sample_obj_path), False)
self.assertEqual(
- self.irods.data_objects.exists(sample_obj_path + '.md5'),
- False,
+ self.irods.data_objects.exists(sample_obj_path + '.md5'), False
)
flow_data = {'zone_uuid': str(self.zone.sodar_uuid)}
@@ -644,18 +693,13 @@ def test_move(self):
)
self.assertEqual(self.irods.data_objects.exists(sample_obj_path), True)
self.assertEqual(
- self.irods.data_objects.exists(sample_obj_path + '.md5'),
- True,
+ self.irods.data_objects.exists(sample_obj_path + '.md5'), True
)
self.assert_irods_access(
- self.group_name,
- sample_obj_path,
- self.irods_access_read,
+ self.group_name, sample_obj_path, self.irods_access_read
)
self.assert_irods_access(
- self.group_name,
- sample_obj_path + '.md5',
- self.irods_access_read,
+ self.group_name, sample_obj_path + '.md5', self.irods_access_read
)
def test_move_no_checksum(self):
@@ -678,8 +722,7 @@ def test_move_no_checksum(self):
)
self.assertEqual(self.irods.data_objects.exists(sample_obj_path), False)
self.assertEqual(
- self.irods.data_objects.exists(sample_obj_path + '.md5'),
- False,
+ self.irods.data_objects.exists(sample_obj_path + '.md5'), False
)
self.assertIsNone(obj.replicas[0].checksum)
@@ -698,8 +741,7 @@ def test_move_no_checksum(self):
self.assertEqual(self.irods.collections.exists(self.zone_path), False)
self.assertEqual(self.irods.data_objects.exists(sample_obj_path), True)
self.assertEqual(
- self.irods.data_objects.exists(sample_obj_path + '.md5'),
- True,
+ self.irods.data_objects.exists(sample_obj_path + '.md5'), True
)
obj = self.irods.data_objects.get(sample_obj_path) # Reload object
self.assertIsNotNone(obj.replicas[0].checksum)
@@ -771,8 +813,7 @@ def test_move_coll_exists(self):
sample_obj_path = os.path.join(self.sample_path, COLL_NAME, OBJ_NAME)
self.assertEqual(self.irods.data_objects.exists(sample_obj_path), True)
self.assertEqual(
- self.irods.data_objects.exists(sample_obj_path + '.md5'),
- True,
+ self.irods.data_objects.exists(sample_obj_path + '.md5'), True
)
def test_move_obj_exists(self):
@@ -908,8 +949,7 @@ def test_revert(self):
sample_obj_path = os.path.join(self.sample_path, COLL_NAME, OBJ_NAME)
self.assertEqual(self.irods.data_objects.exists(sample_obj_path), False)
self.assertEqual(
- self.irods.data_objects.exists(sample_obj_path + '.md5'),
- False,
+ self.irods.data_objects.exists(sample_obj_path + '.md5'), False
)
self.assert_irods_access(
self.user.username, zone_coll, IRODS_ACCESS_OWN
@@ -956,8 +996,7 @@ def test_move_restrict(self):
)
self.assertEqual(self.irods.data_objects.exists(sample_obj_path), False)
self.assertEqual(
- self.irods.data_objects.exists(sample_obj_path + '.md5'),
- False,
+ self.irods.data_objects.exists(sample_obj_path + '.md5'), False
)
flow_data = {'zone_uuid': str(new_zone.sodar_uuid)}
@@ -981,18 +1020,116 @@ def test_move_restrict(self):
)
self.assertEqual(self.irods.data_objects.exists(sample_obj_path), True)
self.assertEqual(
- self.irods.data_objects.exists(sample_obj_path + '.md5'),
- True,
+ self.irods.data_objects.exists(sample_obj_path + '.md5'), True
)
self.assert_irods_access(
- self.group_name,
- sample_obj_path,
- self.irods_access_read,
+ self.group_name, sample_obj_path, self.irods_access_read
)
self.assert_irods_access(
- self.group_name,
- sample_obj_path + '.md5',
- self.irods_access_read,
+ self.group_name, sample_obj_path + '.md5', self.irods_access_read
+ )
+
+
+@override_settings(IRODS_ROOT_PATH=IRODS_ROOT_PATH)
+class TestLandingZoneMoveAltRootPath(
+ LandingZoneMixin,
+ LandingZoneTaskflowMixin,
+ SampleSheetIOMixin,
+ SampleSheetTaskflowMixin,
+ TaskflowbackendFlowTestBase,
+):
+ """Tests for the landing_zone_move flow with IRODS_ROOT_PATH set"""
+
+ def setUp(self):
+ super().setUp()
+ self.project, self.owner_as = self.make_project_taskflow(
+ 'NewProject', PROJECT_TYPE_PROJECT, self.category, self.user
+ )
+ self.group_name = self.irods_backend.get_user_group_name(self.project)
+ self.project_path = self.irods_backend.get_path(self.project)
+ # Import investigation
+ self.investigation = self.import_isa_from_file(SHEET_PATH, self.project)
+ self.study = self.investigation.studies.first()
+ self.assay = self.study.assays.first()
+ # Create iRODS collections
+ self.make_irods_colls(self.investigation)
+ # Create zone
+ self.zone = self.make_landing_zone(
+ title=ZONE_TITLE,
+ project=self.project,
+ user=self.user,
+ assay=self.assay,
+ description=ZONE_DESC,
+ status=ZONE_STATUS_CREATING,
+ )
+ self.make_zone_taskflow(self.zone)
+ self.sample_path = self.irods_backend.get_path(self.assay)
+ self.zone_path = self.irods_backend.get_path(self.zone)
+ self.group_name = self.irods_backend.get_user_group_name(self.project)
+
+ def test_move_alt_root_path(self):
+ """Test landing_zone_move with IRODS_ROOT_PATH set"""
+ # Assert alt paths have been set correctly and returned for all paths
+ root_path = self.irods_backend.get_root_path()
+ self.assertEqual(
+ root_path, '/{}/{}'.format(settings.IRODS_ZONE, IRODS_ROOT_PATH)
+ )
+ self.assertTrue(self.project_path.startswith(root_path))
+ self.assertTrue(self.sample_path.startswith(root_path))
+ self.assertTrue(self.zone_path.startswith(root_path))
+ self.assertEqual(self.irods.collections.exists(self.project_path), True)
+ self.assertEqual(self.irods.collections.exists(self.sample_path), True)
+ self.assertEqual(self.irods.collections.exists(self.zone_path), True)
+
+ self.assertEqual(self.zone.status, ZONE_STATUS_ACTIVE)
+ empty_coll_path = os.path.join(self.zone_path, COLL_NAME)
+ self.irods.collections.create(empty_coll_path)
+ obj_coll_path = os.path.join(self.zone_path, OBJ_COLL_NAME)
+ obj_coll = self.irods.collections.create(obj_coll_path)
+ obj = self.make_irods_object(obj_coll, OBJ_NAME)
+ self.make_irods_md5_object(obj)
+ obj_path = os.path.join(obj_coll_path, OBJ_NAME)
+ sample_obj_path = os.path.join(
+ self.sample_path, OBJ_COLL_NAME, OBJ_NAME
+ )
+
+ self.assertEqual(self.irods.collections.exists(empty_coll_path), True)
+ self.assertEqual(self.irods.collections.exists(obj_coll_path), True)
+ self.assertEqual(self.irods.data_objects.exists(obj_path), True)
+ self.assertEqual(
+ self.irods.data_objects.exists(obj_path + '.md5'), True
+ )
+ self.assertEqual(self.irods.data_objects.exists(sample_obj_path), False)
+ self.assertEqual(
+ self.irods.data_objects.exists(sample_obj_path + '.md5'), False
+ )
+
+ flow_data = {'zone_uuid': str(self.zone.sodar_uuid)}
+ flow = self.taskflow.get_flow(
+ irods_backend=self.irods_backend,
+ project=self.project,
+ flow_name='landing_zone_move',
+ flow_data=flow_data,
+ )
+ self._build_and_run(flow)
+
+ self.zone.refresh_from_db()
+ self.assertEqual(self.zone.status, ZONE_STATUS_MOVED)
+ self.assertEqual(self.irods.collections.exists(self.zone_path), False)
+ sample_empty_path = os.path.join(self.sample_path, COLL_NAME)
+ # An empty collection should not be created by moving
+ self.assertEqual(
+ self.irods.collections.exists(sample_empty_path), False
+ )
+ self.assertEqual(self.irods.data_objects.exists(sample_obj_path), True)
+ self.assertEqual(
+ self.irods.data_objects.exists(sample_obj_path + '.md5'), True
+ )
+ self.assert_irods_access(
+ self.group_name, sample_obj_path, self.irods_access_read
+ )
+ self.assert_irods_access(
+ self.group_name, sample_obj_path + '.md5', self.irods_access_read
)
@@ -1041,12 +1178,10 @@ def test_create(self):
self.irods_backend.get_path(project)
)
self.assertEqual(
- project_coll.metadata.get_one('title').value,
- project.title,
+ project_coll.metadata.get_one('title').value, project.title
)
self.assertEqual(
- project_coll.metadata.get_one('description').value,
- META_EMPTY_VALUE,
+ project_coll.metadata.get_one('description').value, META_EMPTY_VALUE
)
self.assertEqual(
project_coll.metadata.get_one('parent_uuid').value,
@@ -1075,12 +1210,10 @@ def test_update_metadata(self):
self.assertNotEqual(self.project.description, UPDATED_DESC)
project_coll = self.irods.collections.get(self.project_path)
self.assertEqual(
- project_coll.metadata.get_one('title').value,
- self.project.title,
+ project_coll.metadata.get_one('title').value, self.project.title
)
self.assertEqual(
- project_coll.metadata.get_one('description').value,
- META_EMPTY_VALUE,
+ project_coll.metadata.get_one('description').value, META_EMPTY_VALUE
)
self.project.title = UPDATED_TITLE
@@ -1097,12 +1230,10 @@ def test_update_metadata(self):
project_coll = self.irods.collections.get(self.project_path)
self.assertEqual(
- project_coll.metadata.get_one('title').value,
- UPDATED_TITLE,
+ project_coll.metadata.get_one('title').value, UPDATED_TITLE
)
self.assertEqual(
- project_coll.metadata.get_one('description').value,
- UPDATED_DESC,
+ project_coll.metadata.get_one('description').value, UPDATED_DESC
)
def test_update_parent(self):
@@ -1180,7 +1311,10 @@ def test_enable_access(self):
)
self.assert_irods_access(PUBLIC_GROUP, self.sample_path, None)
- flow_data = {'path': self.sample_path, 'access': True}
+ flow_data = {
+ 'path': self.sample_path,
+ 'access': True,
+ }
flow = self.taskflow.get_flow(
irods_backend=self.irods_backend,
project=self.project,
@@ -1211,7 +1345,10 @@ def test_disable_access(self):
PUBLIC_GROUP, self.sample_path, self.irods_access_read
)
- flow_data = {'path': self.sample_path, 'access': False}
+ flow_data = {
+ 'path': self.sample_path,
+ 'access': False,
+ }
flow = self.taskflow.get_flow(
irods_backend=self.irods_backend,
project=self.project,
@@ -1234,7 +1371,10 @@ def test_revert(self):
)
self.assert_irods_access(PUBLIC_GROUP, self.sample_path, None)
- flow_data = {'path': self.sample_path, 'access': True}
+ flow_data = {
+ 'path': self.sample_path,
+ 'access': True,
+ }
flow = self.taskflow.get_flow(
irods_backend=self.irods_backend,
project=self.project,
@@ -1675,12 +1815,11 @@ def test_delete(self):
project=self.project, active=True
).first()
)
- flow_data = {}
flow = self.taskflow.get_flow(
irods_backend=self.irods_backend,
project=self.project,
flow_name='sheet_delete',
- flow_data=flow_data,
+ flow_data={},
)
self.assertEqual(type(flow), SheetDeleteFlow)
self._build_and_run(flow)
@@ -1700,12 +1839,11 @@ def test_delete_colls(self):
self.make_irods_colls(self.investigation)
self.assertEqual(self.irods.collections.exists(self.sample_path), True)
- flow_data = {}
flow = self.taskflow.get_flow(
irods_backend=self.irods_backend,
project=self.project,
flow_name='sheet_delete',
- flow_data=flow_data,
+ flow_data={},
)
self.assertEqual(type(flow), SheetDeleteFlow)
self._build_and_run(flow)
@@ -1739,12 +1877,11 @@ def test_delete_zone(self):
zone_path = self.irods_backend.get_path(zone)
self.assertEqual(self.irods.collections.exists(zone_path), True)
- flow_data = {}
flow = self.taskflow.get_flow(
irods_backend=self.irods_backend,
project=self.project,
flow_name='sheet_delete',
- flow_data=flow_data,
+ flow_data={},
)
self.assertEqual(type(flow), SheetDeleteFlow)
self._build_and_run(flow)
diff --git a/taskflowbackend/tests/test_tasks.py b/taskflowbackend/tests/test_tasks.py
index 3f5a353ab..62f455efc 100644
--- a/taskflowbackend/tests/test_tasks.py
+++ b/taskflowbackend/tests/test_tasks.py
@@ -34,6 +34,7 @@
NEW_COLL2_NAME = 'test_new2'
TEST_OBJ_NAME = 'move_obj'
SUB_COLL_NAME = 'sub'
+SUB_COLL_NAME2 = 'sub2'
MOVE_COLL_NAME = 'move_coll'
TEST_USER = USER_PREFIX + 'user3'
@@ -860,7 +861,7 @@ def test_execute_twice(self):
self.assertEqual(user_access.access_name, self.irods_access_read)
def test_revert_created(self):
- """Test access setting"""
+ """Test reverting created access"""
self._add_task(
cls=SetAccessTask,
name='Set access',
@@ -883,7 +884,7 @@ def test_revert_created(self):
# self.assertEqual(user_access.access_name, TEST_ACCESS_NULL)
def test_revert_modified(self):
- """Test access setting reverting after modification"""
+ """Test reverting modified access"""
self._add_task(
cls=SetAccessTask,
name='Set access',
@@ -1607,6 +1608,218 @@ def test_overwrite_failure(self):
# TODO: Test Checksum verifying
+class TestBatchSetAccessTask(IRODSTaskTestBase):
+ """Tests for BatchSetAccessTask"""
+
+ def setUp(self):
+ super().setUp()
+ self.sub_coll_path = os.path.join(self.test_coll_path, SUB_COLL_NAME)
+ self.sub_coll_path2 = os.path.join(self.test_coll_path, SUB_COLL_NAME2)
+ self.irods.collections.create(self.sub_coll_path)
+ self.irods.collections.create(self.sub_coll_path2)
+ self.paths = [self.sub_coll_path, self.sub_coll_path2]
+ # Init default user group
+ self.irods.user_groups.create(DEFAULT_USER_GROUP)
+ self.access_lookup = self.irods_backend.get_access_lookup(self.irods)
+
+ def test_execute_read(self):
+ """Test access setting for read"""
+ self._add_task(
+ cls=BatchSetAccessTask,
+ name='Set access',
+ inject={
+ 'access_name': IRODS_ACCESS_READ_IN,
+ 'paths': self.paths,
+ 'user_name': DEFAULT_USER_GROUP,
+ 'access_lookup': self.access_lookup,
+ 'irods_backend': self.irods_backend,
+ },
+ )
+ self.assert_irods_access(DEFAULT_USER_GROUP, self.sub_coll_path, None)
+ self.assert_irods_access(DEFAULT_USER_GROUP, self.sub_coll_path2, None)
+ result = self._run_flow()
+
+ self.assertEqual(result, True)
+ self.assert_irods_access(
+ DEFAULT_USER_GROUP, self.sub_coll_path, self.irods_access_read
+ )
+ self.assert_irods_access(
+ DEFAULT_USER_GROUP, self.sub_coll_path2, self.irods_access_read
+ )
+
+ def test_execute_write(self):
+ """Test access setting for write/modify"""
+ self._add_task(
+ cls=BatchSetAccessTask,
+ name='Set access',
+ inject={
+ 'access_name': IRODS_ACCESS_WRITE_IN,
+ 'paths': self.paths,
+ 'user_name': DEFAULT_USER_GROUP,
+ 'access_lookup': self.access_lookup,
+ 'irods_backend': self.irods_backend,
+ },
+ )
+ self.assert_irods_access(DEFAULT_USER_GROUP, self.sub_coll_path, None)
+ self.assert_irods_access(DEFAULT_USER_GROUP, self.sub_coll_path2, None)
+ result = self._run_flow()
+
+ self.assertEqual(result, True)
+ self.assert_irods_access(
+ DEFAULT_USER_GROUP, self.sub_coll_path, self.irods_access_write
+ )
+ self.assert_irods_access(
+ DEFAULT_USER_GROUP, self.sub_coll_path2, self.irods_access_write
+ )
+
+ def test_execute_mixed(self):
+ """Test access setting for both new and previously set access levels"""
+ self._add_task(
+ cls=SetAccessTask,
+ name='Set access',
+ inject={
+ 'access_name': IRODS_ACCESS_READ_IN,
+ 'path': self.sub_coll_path,
+ 'user_name': DEFAULT_USER_GROUP,
+ 'access_lookup': self.access_lookup,
+ 'irods_backend': self.irods_backend,
+ },
+ )
+ self._run_flow()
+ self.assert_irods_access(
+ DEFAULT_USER_GROUP, self.sub_coll_path, self.irods_access_read
+ )
+ self.assert_irods_access(DEFAULT_USER_GROUP, self.sub_coll_path2, None)
+
+ self.flow = self._init_flow()
+ self._add_task(
+ cls=BatchSetAccessTask,
+ name='Set access',
+ inject={
+ 'access_name': IRODS_ACCESS_WRITE_IN,
+ 'paths': self.paths,
+ 'user_name': DEFAULT_USER_GROUP,
+ 'access_lookup': self.access_lookup,
+ 'irods_backend': self.irods_backend,
+ },
+ )
+ result = self._run_flow()
+
+ self.assertEqual(result, True)
+ self.assert_irods_access(
+ DEFAULT_USER_GROUP, self.sub_coll_path, self.irods_access_write
+ )
+ self.assert_irods_access(
+ DEFAULT_USER_GROUP, self.sub_coll_path2, self.irods_access_write
+ )
+
+ def test_revert_created(self):
+ """Test reverting created access"""
+ self._add_task(
+ cls=BatchSetAccessTask,
+ name='Set access',
+ inject={
+ 'access_name': IRODS_ACCESS_READ_IN,
+ 'paths': self.paths,
+ 'user_name': DEFAULT_USER_GROUP,
+ 'access_lookup': self.access_lookup,
+ 'irods_backend': self.irods_backend,
+ },
+ force_fail=True,
+ ) # FAIL
+ self.assert_irods_access(DEFAULT_USER_GROUP, self.sub_coll_path, None)
+ self.assert_irods_access(DEFAULT_USER_GROUP, self.sub_coll_path2, None)
+ result = self._run_flow()
+
+ self.assertNotEqual(result, True)
+ self.assert_irods_access(DEFAULT_USER_GROUP, self.sub_coll_path, None)
+ self.assert_irods_access(DEFAULT_USER_GROUP, self.sub_coll_path2, None)
+
+ def test_revert_modified(self):
+ """Test reverting modified access"""
+ self._add_task(
+ cls=BatchSetAccessTask,
+ name='Set access',
+ inject={
+ 'access_name': IRODS_ACCESS_READ_IN,
+ 'paths': self.paths,
+ 'user_name': DEFAULT_USER_GROUP,
+ 'access_lookup': self.access_lookup,
+ 'irods_backend': self.irods_backend,
+ },
+ )
+ self._run_flow()
+ self.assert_irods_access(
+ DEFAULT_USER_GROUP, self.sub_coll_path, self.irods_access_read
+ )
+ self.assert_irods_access(
+ DEFAULT_USER_GROUP, self.sub_coll_path2, self.irods_access_read
+ )
+
+ self.flow = self._init_flow()
+ self._add_task(
+ cls=BatchSetAccessTask,
+ name='Set access',
+ inject={
+ 'access_name': IRODS_ACCESS_WRITE_IN,
+ 'paths': self.paths,
+ 'user_name': DEFAULT_USER_GROUP,
+ 'access_lookup': self.access_lookup,
+ 'irods_backend': self.irods_backend,
+ },
+ force_fail=True,
+ ) # FAIL
+ result = self._run_flow()
+
+ self.assertNotEqual(result, True)
+ self.assert_irods_access(
+ DEFAULT_USER_GROUP, self.sub_coll_path, self.irods_access_read
+ )
+ self.assert_irods_access(
+ DEFAULT_USER_GROUP, self.sub_coll_path2, self.irods_access_read
+ )
+
+ def test_revert_mixed(self):
+ """Test reverting access for both new and existing access levels"""
+ self._add_task(
+ cls=SetAccessTask,
+ name='Set access',
+ inject={
+ 'access_name': IRODS_ACCESS_READ_IN,
+ 'path': self.sub_coll_path,
+ 'user_name': DEFAULT_USER_GROUP,
+ 'access_lookup': self.access_lookup,
+ 'irods_backend': self.irods_backend,
+ },
+ )
+ self._run_flow()
+ self.assert_irods_access(
+ DEFAULT_USER_GROUP, self.sub_coll_path, self.irods_access_read
+ )
+ self.assert_irods_access(DEFAULT_USER_GROUP, self.sub_coll_path2, None)
+
+ self.flow = self._init_flow()
+ self._add_task(
+ cls=BatchSetAccessTask,
+ name='Set access',
+ inject={
+ 'access_name': IRODS_ACCESS_WRITE_IN,
+ 'paths': self.paths,
+ 'user_name': DEFAULT_USER_GROUP,
+ 'access_lookup': self.access_lookup,
+ 'irods_backend': self.irods_backend,
+ },
+ force_fail=True,
+ ) # FAIL
+ result = self._run_flow()
+
+ self.assertNotEqual(result, True)
+ self.assert_irods_access(
+ DEFAULT_USER_GROUP, self.sub_coll_path, self.irods_access_read
+ )
+ self.assert_irods_access(DEFAULT_USER_GROUP, self.sub_coll_path2, None)
+
+
class TestBatchCreateCollectionsTask(IRODSTaskTestBase):
"""Tests for BatchCreateCollectionsTask"""