diff --git a/CHANGELOG b/CHANGELOG
index 791c594bb10..ae86734434f 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -2,6 +2,20 @@
 We follow the CalVer (https://calver.org/) versioning scheme: YY.MINOR.MICRO.
 
+19.26.0 (2019-09-11)
+===================
+- Create a custom through table for linking files and versions
+  that stores version names, so different versions of the same
+  file can have different names.
+- Update README for populating institutions.
+
+
+19.25.0 (2019-09-05)
+===================
+- Automate account deactivation if users have no content
+- Clean up EZID workflow
+- Check redirect URLs for spam
+
 19.24.0 (2019-08-27)
 ===================
 - APIv2: Allow creating a node with a license attached on creation
diff --git a/README-docker-compose.md b/README-docker-compose.md
index dc0742940e1..d84a2670014 100644
--- a/README-docker-compose.md
+++ b/README-docker-compose.md
@@ -147,7 +147,7 @@
   - `docker-compose run --rm web python manage.py migrate`
   - Populate institutions:
     - After resetting your database or with a new install you will need to populate the table of institutions. **You must have run migrations first.**
-    - `docker-compose run --rm web python -m scripts.populate_institutions test`
+    - `docker-compose run --rm web python -m scripts.populate_institutions -e test -a`
   - Populate preprint, registration, and collection providers:
     - After resetting your database or with a new install, the required providers and subjects will be created automatically **when you run migrations.** To create more:
     - `docker-compose run --rm web python manage.py populate_fake_providers`
diff --git a/addons/base/views.py b/addons/base/views.py
index b31798b24a1..44a83d2c4a0 100644
--- a/addons/base/views.py
+++ b/addons/base/views.py
@@ -37,7 +37,7 @@
 from addons.base import signals as file_signals
 from addons.base.utils import format_last_known_metadata, get_mfr_url
 from osf import features
-from osf.models import (BaseFileNode, TrashedFileNode,
+from osf.models import (BaseFileNode, TrashedFileNode, BaseFileVersionsThrough,
                         OSFUser, AbstractNode, Preprint,
                         NodeLog, DraftRegistration, RegistrationSchema,
                         Guid, FileVersionUserMetadata, FileVersion)
@@ -875,6 +875,10 @@ def addon_view_file(auth, node, file_node, version):
             args={'url': download_url.url}
         )
 
+    version_names = BaseFileVersionsThrough.objects.filter(
+        basefilenode_id=file_node.id
+    ).order_by('-fileversion_id').values_list('version_name', flat=True)
+
     ret.update({
         'urls': {
             'render': render_url.url,
@@ -902,6 +906,7 @@ def addon_view_file(auth, node, file_node, version):
         'allow_comments': file_node.provider in settings.ADDONS_COMMENTABLE,
         'checkout_user': file_node.checkout._id if file_node.checkout else None,
         'pre_reg_checkout': is_pre_reg_checkout(node, file_node),
+        'version_names': list(version_names)
     })
     ret.update(rubeus.collect_addon_assets(node))
 
diff --git a/addons/forward/models.py b/addons/forward/models.py
index 6cf0d8e8544..8cc518f7887 100644
--- a/addons/forward/models.py
+++ b/addons/forward/models.py
@@ -1,10 +1,12 @@
 # -*- coding: utf-8 -*-
+from osf.utils.requests import get_request_and_user_id, get_headers_from_request
 from addons.base.models import BaseNodeSettings
 from dirtyfields import DirtyFieldsMixin
 from django.db import models
 from osf.exceptions import ValidationValueError
 from osf.models.validators import validate_no_html
+from osf.models import OSFUser
 
 
 class NodeSettings(DirtyFieldsMixin, BaseNodeSettings):
@@ -33,6 +35,17 @@ def after_register(self, node, registration, user, save=True):
 
         return clone, None
 
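+    # Passing the request through save() lets the owner node's spam check see which
+    # user made the change and with what request headers (see check_spam below).
+    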
def save(self, request=None, *args, **kwargs): + super(NodeSettings, self).save(*args, **kwargs) + if request: + if not hasattr(request, 'user'): # TODO: remove when Flask is removed + _, user_id = get_request_and_user_id() + user = OSFUser.load(user_id) + else: + user = request.user + + self.owner.check_spam(user, {'addons_forward_node_settings__url'}, get_headers_from_request(request)) + def clean(self): if self.url and self.owner._id in self.url: raise ValidationValueError('Circular URL') diff --git a/addons/forward/tests/test_views.py b/addons/forward/tests/test_views.py index 11907155367..a07b925b8d4 100644 --- a/addons/forward/tests/test_views.py +++ b/addons/forward/tests/test_views.py @@ -1,16 +1,18 @@ +import mock import pytest from nose.tools import assert_equal from addons.forward.tests.utils import ForwardAddonTestCase from tests.base import OsfTestCase +from website import settings pytestmark = pytest.mark.django_db -class TestForwardLogs(ForwardAddonTestCase, OsfTestCase): +class TestForward(ForwardAddonTestCase, OsfTestCase): def setUp(self): - super(TestForwardLogs, self).setUp() + super(TestForward, self).setUp() self.app.authenticate(*self.user.auth) def test_change_url_log_added(self): @@ -40,3 +42,19 @@ def test_change_timeout_log_not_added(self): self.project.logs.count(), log_count ) + + @mock.patch.object(settings, 'SPAM_CHECK_ENABLED', True) + @mock.patch('osf.models.node.Node.do_check_spam') + def test_change_url_check_spam(self, mock_check_spam): + self.project.is_public = True + self.project.save() + self.app.put_json(self.project.api_url_for('forward_config_put'), {'url': 'http://possiblyspam.com'}) + + assert mock_check_spam.called + data, _ = mock_check_spam.call_args + author, author_email, content, request_headers = data + + assert author == self.user.fullname + assert author_email == self.user.username + assert content == 'http://possiblyspam.com' + diff --git a/addons/forward/views/config.py b/addons/forward/views/config.py index b7e62ad348d..5385086d229 100644 --- a/addons/forward/views/config.py +++ b/addons/forward/views/config.py @@ -44,7 +44,7 @@ def forward_config_put(auth, node_addon, **kwargs): # Save settings and get changed fields; crash if validation fails try: dirty_fields = node_addon.get_dirty_fields() - node_addon.save() + node_addon.save(request=request) except ValidationValueError: raise HTTPError(http_status.HTTP_400_BAD_REQUEST) diff --git a/addons/osfstorage/models.py b/addons/osfstorage/models.py index 43e521ed186..b52151d4684 100644 --- a/addons/osfstorage/models.py +++ b/addons/osfstorage/models.py @@ -313,7 +313,8 @@ def create_version(self, creator, location, metadata=None): version._find_matching_archive(save=False) version.save() - self.versions.add(version) + # Adds version to the list of file versions - using custom through table + self.add_version(version) self.save() return version diff --git a/addons/osfstorage/tests/test_models.py b/addons/osfstorage/tests/test_models.py index 4e7fb592aed..42147bf5768 100644 --- a/addons/osfstorage/tests/test_models.py +++ b/addons/osfstorage/tests/test_models.py @@ -299,11 +299,24 @@ def test_materialized_path_nested(self): def test_copy(self): to_copy = self.node_settings.get_root().append_file('Carp') copy_to = self.node_settings.get_root().append_folder('Cloud') + version = to_copy.create_version( + self.user, + { + 'service': 'cloud', + settings.WATERBUTLER_RESOURCE: 'osf', + 'object': '06d80e', + }, { + 'sha256': 'existing', + 'vault': 'the cloud', + 'archive': 'erchiv' + }) + 
assert_equal(to_copy.versions.first().get_basefilenode_version(to_copy).version_name, 'Carp') copied = to_copy.copy_under(copy_to) assert_not_equal(copied, to_copy) assert_equal(copied.parent, copy_to) + assert_equal(copied.versions.first().get_basefilenode_version(copied).version_name, 'Carp') assert_equal(to_copy.parent, self.node_settings.get_root()) def test_copy_node_file_to_preprint(self): @@ -347,7 +360,7 @@ def test_move_nested_between_regions(self): for _ in range(2): version = factories.FileVersionFactory(region=self.node_settings.region) - child.versions.add(version) + child.add_version(version) child.save() moved = to_move.move_under(move_to) @@ -361,8 +374,21 @@ def test_move_nested_between_regions(self): def test_copy_rename(self): to_copy = self.node_settings.get_root().append_file('Carp') copy_to = self.node_settings.get_root().append_folder('Cloud') + version = to_copy.create_version( + self.user, + { + 'service': 'cloud', + settings.WATERBUTLER_RESOURCE: 'osf', + 'object': '06d80e', + }, { + 'sha256': 'existing', + 'vault': 'the cloud', + 'archive': 'erchiv' + }) + assert_equal(to_copy.versions.first().get_basefilenode_version(to_copy).version_name, 'Carp') copied = to_copy.copy_under(copy_to, name='But') + assert_equal(copied.versions.first().get_basefilenode_version(copied).version_name, 'But') assert_equal(copied.name, 'But') assert_not_equal(copied, to_copy) @@ -381,12 +407,25 @@ def test_move(self): def test_move_and_rename(self): to_move = self.node_settings.get_root().append_file('Carp') + version = to_move.create_version( + self.user, + { + 'service': 'cloud', + settings.WATERBUTLER_RESOURCE: 'osf', + 'object': '06d80e', + }, { + 'sha256': 'existing', + 'vault': 'the cloud', + 'archive': 'erchiv' + }) move_to = self.node_settings.get_root().append_folder('Cloud') + assert_equal(to_move.versions.first().get_basefilenode_version(to_move).version_name, 'Carp') moved = to_move.move_under(move_to, name='Tuna') assert_equal(to_move, moved) assert_equal(to_move.name, 'Tuna') + assert_equal(moved.versions.first().get_basefilenode_version(moved).version_name, 'Tuna') assert_equal(moved.parent, move_to) def test_move_preprint_primary_file_to_node(self): @@ -649,7 +688,7 @@ def test_after_fork_copies_versions(self): for _ in range(num_versions): version = factories.FileVersionFactory() - record.versions.add(version) + record.add_version(version) fork = self.project.fork_node(self.auth_obj) fork_node_settings = fork.get_addon('osfstorage') diff --git a/addons/osfstorage/tests/test_utils.py b/addons/osfstorage/tests/test_utils.py index 7675db8bf36..7e5e82d2711 100644 --- a/addons/osfstorage/tests/test_utils.py +++ b/addons/osfstorage/tests/test_utils.py @@ -12,6 +12,7 @@ from addons.osfstorage import utils from addons.osfstorage.tests.utils import StorageTestCase +from website.files.utils import attach_versions @pytest.mark.django_db @@ -25,7 +26,7 @@ def setUp(self): factories.FileVersionFactory(creator=self.user) for __ in range(3) ] - self.record.versions = self.versions + attach_versions(self.record, self.versions) self.record.save() def test_serialize_revision(self): diff --git a/addons/osfstorage/tests/test_views.py b/addons/osfstorage/tests/test_views.py index 4fc86d2c5bb..396d5aff6d3 100644 --- a/addons/osfstorage/tests/test_views.py +++ b/addons/osfstorage/tests/test_views.py @@ -31,17 +31,18 @@ from osf.models import files as models from addons.osfstorage.apps import osf_storage_root from addons.osfstorage import utils -from addons.base.views import make_auth +from 
addons.base.views import make_auth, addon_view_file from addons.osfstorage import settings as storage_settings -from api_tests.utils import create_test_file +from api_tests.utils import create_test_file, create_test_preprint_file from api.caching.settings import STORAGE_USAGE_KEY from osf_tests.factories import ProjectFactory, ApiOAuth2PersonalTokenFactory, PreprintFactory +from website.files.utils import attach_versions def create_record_with_version(path, node_settings, **kwargs): version = factories.FileVersionFactory(**kwargs) record = node_settings.get_root().append_file(path) - record.versions.add(version) + record.add_version(version) record.save() return record @@ -76,7 +77,7 @@ def test_file_metdata(self): path = u'kind/of/magíc.mp3' record = recursively_create_file(self.node_settings, path) version = factories.FileVersionFactory() - record.versions.add(version) + record.add_version(version) record.save() res = self.send_hook( 'osfstorage_get_metadata', @@ -91,7 +92,7 @@ def test_preprint_primary_file_metadata(self): preprint = PreprintFactory() record = preprint.primary_file version = factories.FileVersionFactory() - record.versions.add(version) + record.add_version(version) record.save() res = self.send_hook( 'osfstorage_get_metadata', @@ -106,7 +107,7 @@ def test_children_metadata(self): path = u'kind/of/magíc.mp3' record = recursively_create_file(self.node_settings, path) version = factories.FileVersionFactory() - record.versions.add(version) + record.add_version(version) record.save() res = self.send_hook( 'osfstorage_get_children', @@ -141,7 +142,7 @@ def test_children_metadata_preprint(self): preprint = PreprintFactory() record = preprint.primary_file version = factories.FileVersionFactory() - record.versions.add(version) + record.add_version(version) record.save() res = self.send_hook( 'osfstorage_get_children', @@ -274,7 +275,9 @@ def test_upload_create(self): assert_is_not(version, None) assert_equal([version], list(record.versions.all())) assert_not_in(version, self.record.versions.all()) + assert_equal(version.get_basefilenode_version(record).version_name, record.name) assert_equal(record.serialize(), res.json['data']) + assert_equal(version.get_basefilenode_version(record).version_name, record.name) assert_equal(res.json['data']['downloads'], self.record.get_download_count()) def test_upload_update(self): @@ -321,6 +324,7 @@ def test_upload_create_child(self): record = parent.find_child_by_name(name) assert_in(version, record.versions.all()) assert_equals(record.name, name) + assert_equals(record.versions.first().get_basefilenode_version(record).version_name, name) assert_equals(record.parent, parent) def test_upload_create_child_with_same_name(self): @@ -341,6 +345,7 @@ def test_upload_create_child_with_same_name(self): record = parent.find_child_by_name(name) assert_in(version, record.versions.all()) assert_equals(record.name, name) + assert_equals(record.versions.first().get_basefilenode_version(record).version_name, name) assert_equals(record.parent, parent) def test_upload_fail_to_create_version_due_to_checkout(self): @@ -514,6 +519,7 @@ def test_upload_update(self): version = models.FileVersion.load(res.json['version']) assert_is_not(version, None) assert_in(version, self.record.versions.all()) + assert_equal(self.record.versions.first().get_basefilenode_version(self.record).version_name, self.name) def test_upload_duplicate(self): location = { @@ -637,7 +643,7 @@ def setUp(self): self.path = 'greasy/pízza.png' self.record = 
recursively_create_file(self.node_settings, self.path) self.version = factories.FileVersionFactory() - self.record.versions = [self.version] + self.record.add_version(self.version) self.record.save() self.payload = { 'metadata': { @@ -711,7 +717,7 @@ def setUp(self): self.record = self.preprint.primary_file self.path = 'greasy/pízza.png' self.version = factories.FileVersionFactory() - self.record.versions = [self.version] + self.record.add_version(self.version) self.record.save() self.payload = { 'metadata': { @@ -783,7 +789,7 @@ def setUp(self): super(TestGetRevisions, self).setUp() self.path = 'tie/your/mother/down.mp3' self.record = recursively_create_file(self.node_settings, self.path) - self.record.versions = [factories.FileVersionFactory() for __ in range(15)] + attach_versions(self.record, [factories.FileVersionFactory() for __ in range(15)]) self.record.save() def get_revisions(self, fid=None, guid=None, **kwargs): @@ -1165,6 +1171,35 @@ def test_move_file_out_of_node(self): ) assert_equal(res.status_code, 200) + def test_can_rename_file(self): + file = create_test_file(self.node, self.user, filename='road_dogg.mp3') + new_name = 'JesseJames.mp3' + + res = self.send_hook( + 'osfstorage_move_hook', + {'guid': self.node._id}, + payload={ + 'action': 'rename', + 'source': file._id, + 'target': self.root_node._id, + 'user': self.user._id, + 'name': file.name, + 'destination': { + 'parent': self.root_node._id, + 'target': self.node._id, + 'name': new_name, + } + }, + target=self.node, + method='post_json', + expect_errors=True, + ) + file.reload() + + assert_equal(res.status_code, 200) + assert_equal(file.name, new_name) + assert_equal(file.versions.first().get_basefilenode_version(file).version_name, new_name) + def test_can_move_file_out_of_quickfiles_node(self): quickfiles_node = QuickFilesNode.objects.get_for_user(self.user) quickfiles_file = create_test_file(quickfiles_node, self.user, filename='slippery.mp3') @@ -1254,6 +1289,35 @@ def test_move_primary_file_out_of_preprint(self): ) assert_equal(res.status_code, 403) + def test_can_rename_file(self): + file = create_test_preprint_file(self.node, self.user, filename='road_dogg.mp3') + new_name = 'JesseJames.mp3' + + res = self.send_hook( + 'osfstorage_move_hook', + {'guid': self.node._id}, + payload={ + 'action': 'rename', + 'source': file._id, + 'target': self.root_node._id, + 'user': self.user._id, + 'name': file.name, + 'destination': { + 'parent': self.root_node._id, + 'target': self.node._id, + 'name': new_name, + } + }, + target=self.node, + method='post_json', + expect_errors=True, + ) + file.reload() + + assert_equal(res.status_code, 200) + assert_equal(file.name, new_name) + assert_equal(file.versions.first().get_basefilenode_version(file).version_name, new_name) + @pytest.mark.django_db class TestMoveHookProjectsOnly(TestMoveHook): @@ -1507,7 +1571,6 @@ def test_file_remove_tag_fail_doesnt_create_log(self, mock_log): @pytest.mark.django_db @pytest.mark.enable_bookmark_creation class TestFileViews(StorageTestCase): - def test_file_views(self): file = create_test_file(target=self.node, user=self.user) url = self.node.web_url_for('addon_view_or_download_file', path=file._id, provider=file.provider) @@ -1553,6 +1616,40 @@ def test_download_file(self): redirect = self.app.get(url, auth=self.user.auth, expect_errors=True) assert redirect.status_code == 400 + def test_addon_view_file(self): + file = create_test_file(target=self.node, user=self.user, filename='first_name') + version = factories.FileVersionFactory() + 
file.add_version(version) + file.move_under(self.node_settings.get_root(), name='second_name') + file.save() + + version = factories.FileVersionFactory() + file.add_version(version) + file.move_under(self.node_settings.get_root(), name='third_name') + file.save() + + ret = addon_view_file(Auth(self.user), self.node, file, version) + assert ret['version_names'] == ['third_name', 'second_name', 'first_name'] + + def test_osfstorage_download_view(self): + file = create_test_file(target=self.node, user=self.user) + version = factories.FileVersionFactory() + file.add_version(version) + file.move_under(self.node_settings.get_root(), name='new_name') + file.save() + + res = self.app.get( + api_url_for( + 'osfstorage_download', + fid=file._id, + guid=self.node._id, + **signing.sign_data(signing.default_signer, {}) + ), + auth=self.user.auth, + ) + assert res.status_code == 200 + assert res.json['data']['name'] == 'new_name' + @responses.activate @mock.patch('framework.auth.cas.get_client') def test_download_file_with_token(self, mock_get_client): diff --git a/addons/osfstorage/views.py b/addons/osfstorage/views.py index 902928e415e..b65657f420e 100644 --- a/addons/osfstorage/views.py +++ b/addons/osfstorage/views.py @@ -192,7 +192,7 @@ def osfstorage_get_children(file_node, **kwargs): , 'kind', 'file' , 'size', LATEST_VERSION.size , 'downloads', COALESCE(DOWNLOAD_COUNT, 0) - , 'version', (SELECT COUNT(*) FROM osf_basefilenode_versions WHERE osf_basefilenode_versions.basefilenode_id = F.id) + , 'version', (SELECT COUNT(*) FROM osf_basefileversionsthrough WHERE osf_basefileversionsthrough.basefilenode_id = F.id) , 'contentType', LATEST_VERSION.content_type , 'modified', LATEST_VERSION.created , 'created', EARLIEST_VERSION.created @@ -213,15 +213,15 @@ def osfstorage_get_children(file_node, **kwargs): FROM osf_basefilenode AS F LEFT JOIN LATERAL ( SELECT * FROM osf_fileversion - JOIN osf_basefilenode_versions ON osf_fileversion.id = osf_basefilenode_versions.fileversion_id - WHERE osf_basefilenode_versions.basefilenode_id = F.id + JOIN osf_basefileversionsthrough ON osf_fileversion.id = osf_basefileversionsthrough.fileversion_id + WHERE osf_basefileversionsthrough.basefilenode_id = F.id ORDER BY created DESC LIMIT 1 ) LATEST_VERSION ON TRUE LEFT JOIN LATERAL ( SELECT * FROM osf_fileversion - JOIN osf_basefilenode_versions ON osf_fileversion.id = osf_basefilenode_versions.fileversion_id - WHERE osf_basefilenode_versions.basefilenode_id = F.id + JOIN osf_basefileversionsthrough ON osf_fileversion.id = osf_basefileversionsthrough.fileversion_id + WHERE osf_basefileversionsthrough.basefilenode_id = F.id ORDER BY created ASC LIMIT 1 ) EARLIEST_VERSION ON TRUE @@ -240,9 +240,9 @@ def osfstorage_get_children(file_node, **kwargs): SELECT EXISTS( SELECT (1) FROM osf_fileversionusermetadata INNER JOIN osf_fileversion ON osf_fileversionusermetadata.file_version_id = osf_fileversion.id - INNER JOIN osf_basefilenode_versions ON osf_fileversion.id = osf_basefilenode_versions.fileversion_id + INNER JOIN osf_basefileversionsthrough ON osf_fileversion.id = osf_basefileversionsthrough.fileversion_id WHERE osf_fileversionusermetadata.user_id = %s - AND osf_basefilenode_versions.basefilenode_id = F.id + AND osf_basefileversionsthrough.basefilenode_id = F.id LIMIT 1 ) ) SEEN_FILE ON TRUE @@ -336,6 +336,7 @@ def osfstorage_create_child(file_node, payload, **kwargs): )) except KeyError: raise HTTPError(http_status.HTTP_400_BAD_REQUEST) + current_version = file_node.get_version() new_version = file_node.create_version(user, 
location, metadata)
@@ -403,9 +404,12 @@ def osfstorage_download(file_node, payload, **kwargs):
             raise make_error(http_status.HTTP_400_BAD_REQUEST, message_short='Version must be an integer if not specified')
 
     version = file_node.get_version(version_id, required=True)
+    file_version_thru = version.get_basefilenode_version(file_node)
+    name = file_version_thru.version_name if file_version_thru else file_node.name
+
     return {
         'data': {
-            'name': file_node.name,
+            'name': name,
             'path': version.location_hash,
         },
         'settings': {
diff --git a/api/addons/forward/test_views.py b/api/addons/forward/test_views.py
new file mode 100644
index 00000000000..5e3d3dbd7bd
--- /dev/null
+++ b/api/addons/forward/test_views.py
@@ -0,0 +1,81 @@
+import mock
+import pytest
+
+from addons.forward.tests.utils import ForwardAddonTestCase
+from tests.base import OsfTestCase
+from website import settings
+from tests.json_api_test_app import JSONAPITestApp
+
+pytestmark = pytest.mark.django_db
+
+class TestForward(ForwardAddonTestCase, OsfTestCase):
+    """
+    The forward addon's redirect URL has two v2 routes: an addon-based one (`/v2/nodes/{}/addons/forward/`) and a
+    node-settings-based one (`/v2/nodes/{}/settings/`). Both need to be checked for spam each time they are used
+    to modify a redirect URL.
+    """
+
+    django_app = JSONAPITestApp()
+
+    def setUp(self):
+        super(TestForward, self).setUp()
+        self.app.authenticate(*self.user.auth)
+
+    @mock.patch.object(settings, 'SPAM_CHECK_ENABLED', True)
+    @mock.patch('osf.models.node.Node.do_check_spam')
+    def test_change_url_check_spam(self, mock_check_spam):
+        self.project.is_public = True
+        self.project.save()
+        self.django_app.put_json_api(
+            '/v2/nodes/{}/addons/forward/'.format(self.project._id),
+            {'data': {'attributes': {'url': 'http://possiblyspam.com'}}},
+            auth=self.user.auth,
+        )
+
+        assert mock_check_spam.called
+        data, _ = mock_check_spam.call_args
+        author, author_email, content, request_headers = data
+
+        assert author == self.user.fullname
+        assert author_email == self.user.username
+        assert content == 'http://possiblyspam.com'
+
+    @mock.patch.object(settings, 'SPAM_CHECK_ENABLED', True)
+    @mock.patch('osf.models.node.Node.do_check_spam')
+    def test_change_url_check_spam_node_settings(self, mock_check_spam):
+        self.project.is_public = True
+        self.project.save()
+
+        payload = {
+            'data': {
+                'type': 'node-settings',
+                'attributes': {
+                    'access_requests_enabled': False,
+                    'redirect_link_url': 'http://possiblyspam.com',
+                },
+            },
+        }
+
+        self.django_app.put_json_api(
+            '/v2/nodes/{}/settings/'.format(self.project._id),
+            payload,
+            auth=self.user.auth,
+        )
+
+        assert mock_check_spam.called
+        data, _ = mock_check_spam.call_args
+        author, author_email, content, request_headers = data
+
+        assert author == self.user.fullname
+        assert author_email == self.user.username
+        assert content == 'http://possiblyspam.com'
+
+    def test_invalid_url(self):
+        res = self.django_app.put_json_api(
+            '/v2/nodes/{}/addons/forward/'.format(self.project._id),
+            {'data': {'attributes': {'url': 'bad url'}}},
+            auth=self.user.auth, expect_errors=True,
+        )
+        assert res.status_code == 400
+        error = res.json['errors'][0]
+
+        assert error['detail'] == 'Enter a valid URL.'
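
# How the two routes above reach the spam check (a rough sketch; all names are taken from
# the hunks in this changeset, and "eventually calls" hedges the intermediate plumbing):
#
#   ForwardNodeAddonSettingsSerializer.update / update_forward_fields:
#       instance.save(request=request)
#   addons/forward/models.py NodeSettings.save:
#       self.owner.check_spam(user, {'addons_forward_node_settings__url'},
#                             get_headers_from_request(request))
#   osf/models/mixins.py check_spam eventually calls:
#       Node.do_check_spam(author, author_email, content, headers)  # what the mocks assert against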
diff --git a/api/files/serializers.py b/api/files/serializers.py index 4e4bbbd5501..2e2782a4850 100644 --- a/api/files/serializers.py +++ b/api/files/serializers.py @@ -410,6 +410,7 @@ class FileVersionSerializer(JSONAPISerializer): size = ser.IntegerField(read_only=True, help_text='The size of this file at this version') content_type = ser.CharField(read_only=True, help_text='The mime type of this file at this verison') date_created = VersionedDateTimeField(source='created', read_only=True, help_text='The date that this version was created') + name = ser.SerializerMethodField() links = LinksField({ 'self': 'self_url', 'html': 'absolute_url', @@ -417,6 +418,10 @@ class FileVersionSerializer(JSONAPISerializer): 'render': 'get_render_link', }) + def get_name(self, obj): + file = self.context['file'] + return obj.get_basefilenode_version(file).version_name + class Meta: type_ = 'file_versions' diff --git a/api/nodes/serializers.py b/api/nodes/serializers.py index fb585f31263..cf5c9cecf7c 100644 --- a/api/nodes/serializers.py +++ b/api/nodes/serializers.py @@ -897,7 +897,7 @@ class Meta: # Forward-specific label = ser.CharField(required=False, allow_blank=True) - url = ser.CharField(required=False, allow_blank=True) + url = ser.URLField(required=False, allow_blank=True) links = LinksField({ 'self': 'get_absolute_url', @@ -923,7 +923,9 @@ def create(self, validated_data): class ForwardNodeAddonSettingsSerializer(NodeAddonSettingsSerializerBase): def update(self, instance, validated_data): - auth = Auth(self.context['request'].user) + request = self.context['request'] + user = request.user + auth = Auth(user) set_url = 'url' in validated_data set_label = 'label' in validated_data @@ -953,7 +955,10 @@ def update(self, instance, validated_data): instance.label = label url_changed = True - instance.save() + try: + instance.save(request=request) + except ValidationError as e: + raise exceptions.ValidationError(detail=str(e)) if url_changed: # add log here because forward architecture isn't great @@ -968,7 +973,6 @@ def update(self, instance, validated_data): auth=auth, save=True, ) - return instance @@ -1805,7 +1809,10 @@ def update_forward_fields(self, obj, validated_data, auth): save_forward = True if save_forward: - forward_addon.save() + try: + forward_addon.save(request=self.context['request']) + except ValidationError as e: + raise exceptions.ValidationError(detail=str(e)) def enable_or_disable_addon(self, obj, should_enable, addon_name, auth): """ diff --git a/api/users/serializers.py b/api/users/serializers.py index e0f8a314e9e..a7beacd096f 100644 --- a/api/users/serializers.py +++ b/api/users/serializers.py @@ -20,9 +20,8 @@ from osf.exceptions import ValidationValueError, ValidationError, BlacklistedEmailError from osf.models import OSFUser, QuickFilesNode, Preprint from osf.utils.requests import string_type_request_headers -from website.settings import MAILCHIMP_GENERAL_LIST, OSF_HELP_LIST, CONFIRM_REGISTRATIONS_BY_EMAIL, OSF_SUPPORT_EMAIL +from website.settings import MAILCHIMP_GENERAL_LIST, OSF_HELP_LIST, CONFIRM_REGISTRATIONS_BY_EMAIL from osf.models.provider import AbstractProviderGroupObjectPermission -from website import mails from website.profile.views import update_osf_help_mails_subscription, update_mailchimp_subscription from api.nodes.serializers import NodeSerializer, RegionRelationshipField from api.base.schemas.utils import validate_user_json, from_json @@ -428,6 +427,7 @@ class UserSettingsSerializer(JSONAPISerializer): subscribe_osf_general_email = 
ser.SerializerMethodField() subscribe_osf_help_email = ser.SerializerMethodField() deactivation_requested = ser.BooleanField(source='requested_deactivation', required=False) + contacted_deactivation = ser.BooleanField(required=False, read_only=True) secret = ser.SerializerMethodField(read_only=True) def to_representation(self, instance): @@ -526,18 +526,12 @@ def verify_two_factor(self, instance, value, two_factor_addon): two_factor_addon.save() def request_deactivation(self, instance, requested_deactivation): + if instance.requested_deactivation != requested_deactivation: - if requested_deactivation: - mails.send_mail( - to_addr=OSF_SUPPORT_EMAIL, - mail=mails.REQUEST_DEACTIVATION, - user=instance, - can_change_preferences=False, - ) - instance.email_last_sent = timezone.now() instance.requested_deactivation = requested_deactivation + if not requested_deactivation: + instance.contacted_deactivation = False instance.save() - return def to_representation(self, instance): """ diff --git a/api_tests/files/views/test_file_detail.py b/api_tests/files/views/test_file_detail.py index 78d36975af2..b5914669150 100644 --- a/api_tests/files/views/test_file_detail.py +++ b/api_tests/files/views/test_file_detail.py @@ -674,7 +674,9 @@ def test_listing(self, app, user, file): assert res.status_code == 200 assert len(res.json['data']) == 2 assert res.json['data'][0]['id'] == '2' + assert res.json['data'][0]['attributes']['name'] == file.name assert res.json['data'][1]['id'] == '1' + assert res.json['data'][1]['attributes']['name'] == file.name def test_load_and_property(self, app, user, file): # test_by_id diff --git a/api_tests/nodes/views/test_node_detail.py b/api_tests/nodes/views/test_node_detail.py index 0b77bf2c91c..1ac056eaa6a 100644 --- a/api_tests/nodes/views/test_node_detail.py +++ b/api_tests/nodes/views/test_node_detail.py @@ -1348,8 +1348,7 @@ def test_set_node_private_updates_doi( assert res.status_code == 200 project_public.reload() assert not project_public.is_public - mock_update_doi_metadata.assert_called_with( - project_public._id, status='unavailable') + mock_update_doi_metadata.assert_called_with(project_public._id) @pytest.mark.enable_enqueue_task @mock.patch('website.preprints.tasks.update_or_enqueue_on_preprint_updated') diff --git a/api_tests/nodes/views/test_node_wiki_list.py b/api_tests/nodes/views/test_node_wiki_list.py index 7167aef45b7..27c3074813b 100644 --- a/api_tests/nodes/views/test_node_wiki_list.py +++ b/api_tests/nodes/views/test_node_wiki_list.py @@ -338,7 +338,7 @@ def test_create_public_wiki_page(self, app, user_write_contributor, url_node_pub assert res.json['data']['attributes']['name'] == page_name def test_create_public_wiki_page_with_content(self, app, user_write_contributor, url_node_public, project_public): - page_name = fake.word() + page_name = 'using random variables in tests can sometimes expose Testmon problems!' 
payload = create_wiki_payload(page_name) payload['data']['attributes']['content'] = 'my first wiki page' res = app.post_json_api(url_node_public, payload, auth=user_write_contributor.auth) diff --git a/api_tests/users/views/test_user_settings_detail.py b/api_tests/users/views/test_user_settings_detail.py index e4dd06d46d9..d0433037a41 100644 --- a/api_tests/users/views/test_user_settings_detail.py +++ b/api_tests/users/views/test_user_settings_detail.py @@ -250,31 +250,24 @@ def test_patch_requested_deactivation(self, mock_mail, app, user_one, user_two, assert res.status_code == 403 # Logged in, request to deactivate - assert user_one.email_last_sent is None assert user_one.requested_deactivation is False res = app.patch_json_api(url, payload, auth=user_one.auth) assert res.status_code == 200 user_one.reload() - assert user_one.email_last_sent is not None assert user_one.requested_deactivation is True - assert mock_mail.call_count == 1 # Logged in, deactivation already requested res = app.patch_json_api(url, payload, auth=user_one.auth) assert res.status_code == 200 user_one.reload() - assert user_one.email_last_sent is not None assert user_one.requested_deactivation is True - assert mock_mail.call_count == 1 # Logged in, request to cancel deactivate request payload['data']['attributes']['deactivation_requested'] = False res = app.patch_json_api(url, payload, auth=user_one.auth) assert res.status_code == 200 user_one.reload() - assert user_one.email_last_sent is not None assert user_one.requested_deactivation is False - assert mock_mail.call_count == 1 @mock.patch('framework.auth.views.mails.send_mail') def test_patch_invalid_type(self, mock_mail, app, user_one, url, payload): diff --git a/osf/management/commands/data_storage_usage.py b/osf/management/commands/data_storage_usage.py index 4d557bd9c9e..5f68baaa62d 100644 --- a/osf/management/commands/data_storage_usage.py +++ b/osf/management/commands/data_storage_usage.py @@ -48,7 +48,7 @@ LAST_ROW_SQL = """ SELECT obfnv.id AS fileversion_id - FROM osf_basefilenode_versions AS obfnv + FROM osf_basefileversionsthrough AS obfnv ORDER BY obfnv.id DESC LIMIT 1 """ @@ -73,7 +73,7 @@ node.is_deleted AS node_deleted, node.spam_status, preprint.id IS NOT NULL AS is_supplementary_node - FROM osf_basefilenode_versions AS obfnv + FROM osf_basefileversionsthrough AS obfnv LEFT JOIN osf_basefilenode file ON obfnv.basefilenode_id = file.id LEFT JOIN osf_fileversion version ON obfnv.fileversion_id = version.id LEFT JOIN addons_osfstorage_region region ON version.region_id = region.id @@ -89,7 +89,7 @@ TOTAL_FILE_SIZE_SUM_SQL = """ SELECT 'total', sum(size) AS deleted_size_sum - FROM osf_basefilenode_versions AS obfnv + FROM osf_basefileversionsthrough AS obfnv LEFT JOIN osf_basefilenode file ON obfnv.basefilenode_id = file.id LEFT JOIN osf_fileversion version ON obfnv.fileversion_id = version.id WHERE file.provider = 'osfstorage' @@ -100,7 +100,7 @@ DELETED_FILE_SIZE_SUM_SQL = """ SELECT 'deleted', sum(size) AS deleted_size_sum - FROM osf_basefilenode_versions AS obfnv + FROM osf_basefileversionsthrough AS obfnv LEFT JOIN osf_basefilenode file ON obfnv.basefilenode_id = file.id LEFT JOIN osf_fileversion version ON obfnv.fileversion_id = version.id WHERE file.provider = 'osfstorage' @@ -112,7 +112,7 @@ REGIONAL_NODE_SIZE_SUM_SQL = """ SELECT region.name, sum(size) - FROM osf_basefilenode_versions AS obfnv + FROM osf_basefileversionsthrough AS obfnv LEFT JOIN osf_basefilenode file ON obfnv.basefilenode_id = file.id LEFT JOIN osf_fileversion version ON 
obfnv.fileversion_id = version.id LEFT JOIN addons_osfstorage_region region ON version.region_id = region.id @@ -128,7 +128,7 @@ node.type = 'osf.node' AND NOT node.is_public ) THEN 'osf.private-node' ELSE node.type END AS type, sum(size) - FROM osf_basefilenode_versions AS obfnv + FROM osf_basefileversionsthrough AS obfnv LEFT JOIN osf_basefilenode file ON obfnv.basefilenode_id = file.id LEFT JOIN osf_fileversion version ON obfnv.fileversion_id = version.id LEFT JOIN osf_abstractnode node ON file.target_object_id = node.id @@ -144,7 +144,7 @@ ND_QUICK_FILE_SIZE_SUM_SQL = """ SELECT node.type, sum(size) - FROM osf_basefilenode_versions AS obfnv + FROM osf_basefileversionsthrough AS obfnv LEFT JOIN osf_basefilenode file ON obfnv.basefilenode_id = file.id LEFT JOIN osf_fileversion version ON obfnv.fileversion_id = version.id LEFT JOIN osf_abstractnode node ON file.target_object_id = node.id @@ -161,7 +161,7 @@ ND_PREPRINT_SUPPLEMENT_SIZE_SUM_SQL = """ SELECT 'nd_supplement', sum(size) AS supplementary_node_size - FROM osf_basefilenode_versions AS obfnv + FROM osf_basefileversionsthrough AS obfnv LEFT JOIN osf_basefilenode file ON obfnv.basefilenode_id = file.id LEFT JOIN osf_fileversion version ON obfnv.fileversion_id = version.id LEFT JOIN osf_abstractnode node ON node.id = file.target_object_id @@ -193,7 +193,7 @@ preprint.deleted IS NOT NULL AS preprint_deleted, preprint.spam_status, FALSE AS is_supplementary_node - FROM osf_basefilenode_versions AS obfnv + FROM osf_basefileversionsthrough AS obfnv LEFT JOIN osf_basefilenode file ON obfnv.basefilenode_id = file.id LEFT JOIN osf_fileversion version ON obfnv.fileversion_id = version.id LEFT JOIN addons_osfstorage_region region ON version.region_id = region.id @@ -207,7 +207,7 @@ ND_PREPRINT_SIZE_SUM_SQL = """ SELECT 'nd_preprints', sum(size) AS nd_preprint_size_sum - FROM osf_basefilenode_versions AS obfnv + FROM osf_basefileversionsthrough AS obfnv LEFT JOIN osf_basefilenode file ON obfnv.basefilenode_id = file.id LEFT JOIN osf_fileversion version ON obfnv.fileversion_id = version.id LEFT JOIN osf_preprint preprint ON preprint.id = file.target_object_id @@ -220,7 +220,7 @@ REGIONAL_PREPRINT_SIZE_SUM_SQL = """ SELECT region.name, sum(size) - FROM osf_basefilenode_versions AS obfnv + FROM osf_basefileversionsthrough AS obfnv LEFT JOIN osf_basefilenode file ON obfnv.basefilenode_id = file.id LEFT JOIN osf_fileversion version ON obfnv.fileversion_id = version.id LEFT JOIN addons_osfstorage_region region ON version.region_id = region.id diff --git a/osf/management/commands/deactivate_requested_accounts.py b/osf/management/commands/deactivate_requested_accounts.py new file mode 100644 index 00000000000..25ef179985b --- /dev/null +++ b/osf/management/commands/deactivate_requested_accounts.py @@ -0,0 +1,81 @@ +import logging + +from website import mails +from django.utils import timezone + +from framework.celery_tasks import app as celery_app +from website.app import setup_django +setup_django() +from osf.models import OSFUser +from website.settings import OSF_SUPPORT_EMAIL, OSF_CONTACT_EMAIL +from django.core.management.base import BaseCommand + +logger = logging.getLogger(__name__) +logging.basicConfig(level=logging.INFO) + + +def deactivate_requested_accounts(dry_run=True): + users = OSFUser.objects.filter(requested_deactivation=True, contacted_deactivation=False, date_disabled__isnull=True) + + for user in users: + if user.has_resources: + logger.info('OSF support is being emailed about deactivating the account of user {}.'.format(user._id)) + 
if not dry_run:
+                mails.send_mail(
+                    to_addr=OSF_SUPPORT_EMAIL,
+                    mail=mails.REQUEST_DEACTIVATION,
+                    user=user,
+                    can_change_preferences=False,
+                )
+        else:
+            logger.info('Disabling user {}.'.format(user._id))
+            if not dry_run:
+                user.disable_account()
+                user.is_registered = False
+                mails.send_mail(
+                    to_addr=user.username,
+                    mail=mails.REQUEST_DEACTIVATION_COMPLETE,
+                    user=user,
+                    contact_email=OSF_CONTACT_EMAIL,
+                    can_change_preferences=False,
+                )
+
+        user.contacted_deactivation = True
+        user.email_last_sent = timezone.now()
+        if not dry_run:
+            user.save()
+
+    if dry_run:
+        logger.info('Dry run complete')
+
+
+@celery_app.task(name='management.commands.deactivate_requested_accounts')
+def main(dry_run=False):
+    """
+    This task runs nightly and emails users who want to delete their account with info on how to do so.
+    Users who don't have any content can be deactivated automatically.
+    """
+    if dry_run:
+        logger.info('This is a dry run; no changes will be saved, and no emails will be sent.')
+    deactivate_requested_accounts(dry_run=dry_run)
+
+
+class Command(BaseCommand):
+    help = '''
+    For each user who has requested deactivation, either deactivate the account immediately or, if the user
+    still has active resources (undeleted nodes, preprints, etc.), email OSF support to guide the user through
+    the deactivation process.
+    '''
+
+    def add_arguments(self, parser):
+        super(Command, self).add_arguments(parser)
+        parser.add_argument(
+            '--dry',
+            action='store_true',
+            dest='dry_run',
+            help='Dry run',
+        )
+
+    # Management command handler
+    def handle(self, *args, **options):
+        dry_run = options.get('dry_run', True)
+        main(dry_run=dry_run)
diff --git a/osf/management/commands/force_archive.py b/osf/management/commands/force_archive.py
index b3ef341405b..a51fb8c68fb 100644
--- a/osf/management/commands/force_archive.py
+++ b/osf/management/commands/force_archive.py
@@ -38,6 +38,7 @@
 from scripts import utils as script_utils
 from website.archiver import ARCHIVER_SUCCESS
 from website.settings import ARCHIVE_TIMEOUT_TIMEDELTA, ARCHIVE_PROVIDER
+from website.files.utils import attach_versions
 
 logger = logging.getLogger(__name__)
 
@@ -197,7 +198,7 @@ def manually_archive(tree, reg, node_settings, parent=None):
         if file_obj.versions.exists() and filenode['version']:  # Min version identifier is 1
             if not cloned.versions.filter(identifier=filenode['version']).exists():
-                cloned.versions.add(*file_obj.versions.filter(identifier__lte=filenode['version']))
+                attach_versions(cloned, file_obj.versions.filter(identifier__lte=filenode['version']), file_obj)
 
     if filenode.get('children'):
         manually_archive(filenode['children'], reg, node_settings, parent=cloned)
diff --git a/osf/migrations/0181_osfuser_contacted_deactivation.py b/osf/migrations/0181_osfuser_contacted_deactivation.py
new file mode 100644
index 00000000000..046f2fe968b
--- /dev/null
+++ b/osf/migrations/0181_osfuser_contacted_deactivation.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.15 on 2019-06-13 15:32
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('osf', '0180_finalize_token_scopes_mig'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='osfuser',
+            name='contacted_deactivation',
+            field=models.BooleanField(default=False),
+        ),
+    ]
diff --git a/osf/migrations/0182_add_custom_file_versions_through.py b/osf/migrations/0182_add_custom_file_versions_through.py
new file mode 100644
index 00000000000..e4d5f1b19b6
--- /dev/null
+++ b/osf/migrations/0182_add_custom_file_versions_through.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-04-07 23:20 +from __future__ import unicode_literals + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0181_osfuser_contacted_deactivation'), + ] + + operations = [ + migrations.CreateModel( + name='BaseFileVersionsThrough', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('version_name', models.TextField(blank=True)), + ('basefilenode', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='osf.BaseFileNode')), + ('fileversion', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='osf.FileVersion')), + ], + ), + migrations.AlterUniqueTogether( + name='basefileversionsthrough', + unique_together=set([('basefilenode', 'fileversion')]), + ), + migrations.AlterIndexTogether( + name='basefileversionsthrough', + index_together=set([('basefilenode', 'fileversion')]), + ), + ] diff --git a/osf/migrations/0183_populate_file_versions_through.py b/osf/migrations/0183_populate_file_versions_through.py new file mode 100644 index 00000000000..703c2767588 --- /dev/null +++ b/osf/migrations/0183_populate_file_versions_through.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-03-03 17:52 +from __future__ import unicode_literals + +import logging + +from django.db import migrations, connection + +logger = logging.getLogger(__file__) + + +def restore_default_through_table(state, schema): + sql = """ + DROP TABLE osf_basefilenode_versions; + CREATE TABLE osf_basefilenode_versions AS + SELECT + new_thru.basefilenode_id, + new_thru.fileversion_id + FROM + osf_basefileversionsthrough AS new_thru; + + ALTER TABLE osf_basefilenode_versions ADD COLUMN id SERIAL PRIMARY KEY; + ALTER TABLE osf_basefilenode_versions ADD CONSTRAINT osf_basefilenod_basefilenode_id_b0knah27_fk_osf_basefilenode_id FOREIGN KEY (basefilenode_id) REFERENCES osf_basefilenode DEFERRABLE INITIALLY DEFERRED; + ALTER TABLE osf_basefilenode_versions ALTER COLUMN basefilenode_id + SET + DATA TYPE INTEGER; + ALTER TABLE osf_basefilenode_versions ALTER COLUMN fileversion_id + SET + NOT NULL; + ALTER TABLE osf_basefilenode_versions ALTER COLUMN fileversion_id + SET + DATA TYPE INTEGER; + ALTER TABLE osf_basefilenode_versions ALTER COLUMN basefilenode_id + SET + NOT NULL; + ALTER TABLE osf_basefilenode_versions ADD CONSTRAINT osf_basefilenode__fileversion_id_93etanfc_fk_osf_fileversion_id FOREIGN KEY (fileversion_id) REFERENCES osf_fileversion DEFERRABLE INITIALLY DEFERRED; + ALTER TABLE osf_basefilenode_versions ADD CONSTRAINT osf_basefilenode__fileversion_uniq564 UNIQUE (basefilenode_id, fileversion_id); + CREATE INDEX + ON osf_basefilenode_versions (basefilenode_id, fileversion_id); + CREATE INDEX + ON osf_basefilenode_versions (basefilenode_id); + CREATE INDEX + ON osf_basefilenode_versions (fileversion_id); + """ + with connection.cursor() as cursor: + cursor.execute(sql) + + +def populate_fileversion_name(state, schema): + + sql = """ + DROP TABLE osf_basefileversionsthrough; + CREATE TABLE osf_basefileversionsthrough AS + SELECT + obfv.basefilenode_id, + obfv.fileversion_id, + ob.name as version_name + FROM + osf_basefilenode_versions obfv + LEFT JOIN + osf_basefilenode ob + ON obfv.basefilenode_id = ob.id; + ALTER TABLE osf_basefileversionsthrough ADD COLUMN id SERIAL PRIMARY 
KEY; + ALTER TABLE osf_basefileversionsthrough ADD CONSTRAINT osf_basefilenod_basefilenode_id_b0nwad27_fk_osf_basefilenode_id FOREIGN KEY (basefilenode_id) REFERENCES osf_basefilenode DEFERRABLE INITIALLY DEFERRED; + ALTER TABLE osf_basefileversionsthrough ALTER COLUMN basefilenode_id + SET + DATA TYPE INTEGER; + ALTER TABLE osf_basefileversionsthrough ALTER COLUMN fileversion_id + SET + NOT NULL; + ALTER TABLE osf_basefileversionsthrough ALTER COLUMN fileversion_id + SET + DATA TYPE INTEGER; + ALTER TABLE osf_basefileversionsthrough ALTER COLUMN basefilenode_id + SET + NOT NULL; + ALTER TABLE osf_basefileversionsthrough ADD CONSTRAINT osf_basefilenode__fileversion_id_93nwadfc_fk_osf_fileversion_id FOREIGN KEY (fileversion_id) REFERENCES osf_fileversion DEFERRABLE INITIALLY DEFERRED; + ALTER TABLE osf_basefileversionsthrough ADD CONSTRAINT osf_basefilenode__fileversion_uniq UNIQUE (basefilenode_id, fileversion_id); + CREATE INDEX + ON osf_basefileversionsthrough (basefilenode_id, fileversion_id); + CREATE INDEX + ON osf_basefileversionsthrough (basefilenode_id); + CREATE INDEX + ON osf_basefileversionsthrough (fileversion_id); + """ + + with connection.cursor() as cursor: + cursor.execute(sql) + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0182_add_custom_file_versions_through'), + ] + + operations = [ + migrations.RunPython(populate_fileversion_name, restore_default_through_table) + ] diff --git a/osf/migrations/0184_remove_basefilenode_versions.py b/osf/migrations/0184_remove_basefilenode_versions.py new file mode 100644 index 00000000000..7218a83c71a --- /dev/null +++ b/osf/migrations/0184_remove_basefilenode_versions.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-04-08 00:51 +from __future__ import unicode_literals + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0183_populate_file_versions_through'), + ] + + operations = [ + migrations.RemoveField( + model_name='basefilenode', + name='versions', + ), + ] diff --git a/osf/migrations/0185_basefilenode_versions.py b/osf/migrations/0185_basefilenode_versions.py new file mode 100644 index 00000000000..c910787a7c4 --- /dev/null +++ b/osf/migrations/0185_basefilenode_versions.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-04-08 00:52 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0184_remove_basefilenode_versions'), + ] + + operations = [ + migrations.AddField( + model_name='basefilenode', + name='versions', + field=models.ManyToManyField(through='osf.BaseFileVersionsThrough', to='osf.FileVersion'), + ), + ] diff --git a/osf/models/__init__.py b/osf/models/__init__.py index 45519591223..b4caeab6ff4 100644 --- a/osf/models/__init__.py +++ b/osf/models/__init__.py @@ -31,6 +31,7 @@ from osf.models.identifiers import Identifier # noqa from osf.models.files import ( # noqa BaseFileNode, + BaseFileVersionsThrough, File, Folder, # noqa FileVersion, TrashedFile, TrashedFileNode, TrashedFolder, FileVersionUserMetadata, # noqa ) # noqa diff --git a/osf/models/files.py b/osf/models/files.py index 63884ce8acd..2f143cf109b 100644 --- a/osf/models/files.py +++ b/osf/models/files.py @@ -89,7 +89,7 @@ class BaseFileNode(TypedModel, CommentableMixin, OptionalGuidMixin, Taggable, Ob # Add regardless it can be pinned to a version or not _history = DateTimeAwareJSONField(default=list, 
blank=True)
 
     # A concrete version of a FileNode, must have an identifier
-    versions = models.ManyToManyField('FileVersion')
+    versions = models.ManyToManyField('FileVersion', through='BaseFileVersionsThrough')
 
     target_content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
     target_object_id = models.PositiveIntegerField()
@@ -234,6 +234,18 @@ def to_storage(self, **kwargs):
             storage.pop(key)
         return storage
 
+    def add_version(self, version, name=None):
+        """
+        Relates the file object to the version object.
+        :param version: Version object
+        :param name: Name, optional. Pass in if this version needs to have
+        a different name than the file
+        :return: Returns the version that was passed in
+        """
+        version_name = name or self.name
+        BaseFileVersionsThrough.objects.create(fileversion=version, basefilenode=self, version_name=version_name)
+        return version
+
     @classmethod
     def files_checked_out(cls, user):
         """
@@ -359,10 +371,18 @@ def copy_under(self, destination_parent, name=None):
         return utils.copy_files(self, destination_parent.target, destination_parent, name=name)
 
     def move_under(self, destination_parent, name=None):
+        renaming = name != self.name
         self.name = name or self.name
         self.parent = destination_parent
         self._update_node(save=True)  # Trust _update_node to save us
 
+        if renaming and self.is_file and self.versions.exists():
+            newest_version = self.versions.first()
+            node_file_version = newest_version.get_basefilenode_version(self)
+            # Rename the version in the through table
+            node_file_version.version_name = self.name
+            node_file_version.save()
+
         return self
 
     def belongs_to_node(self, target_id):
@@ -443,6 +463,7 @@ def __repr__(self):
             self.target
         )
 
+
 class UnableToRestore(Exception):
     pass
 
@@ -458,7 +479,7 @@ def kind(self):
 
     def update(self, revision, data, user=None, save=True):
         """Using revision and data update all data pretaining to self
         :param str or None revision: The revision that data points to
-        :param dict data: Metadata recieved from waterbutler
+        :param dict data: Metadata received from waterbutler
         :returns: FileVersion
         """
         self.name = data['name']
@@ -479,7 +500,8 @@ def update(self, revision, data, user=None, save=True):
         # Dont save the latest information
         if revision is not None:
             version.save()
-        self.versions.add(version)
+        # Adds the version to the file's versions via the custom through table
+        self.add_version(version)
         for entry in self.history:
             # Some entry might have an undefined modified field
             if data['modified'] is not None and entry['modified'] is not None and data['modified'] < entry['modified']:
@@ -750,6 +772,11 @@ def archive(self):
     def is_duplicate(self, other):
         return self.location_hash == other.location_hash
 
+    def get_basefilenode_version(self, file):
+        # Returns the through-table record: the row that links this version
+        # to the given file.
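+        # (basefilenode, fileversion) is unique together on BaseFileVersionsThrough,
+        # so .first() yields the single matching row, or None if this version was
+        # never attached to the given file.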
+        return self.basefileversionsthrough_set.filter(basefilenode=file).first()
+
     def update_metadata(self, metadata, save=True):
         self.metadata.update(metadata)
         # metadata has no defined structure so only attempt to set attributes
@@ -806,3 +833,15 @@ def serialize_waterbutler_settings(self, node_id, root_id):
 
     class Meta:
         ordering = ('-created',)
+
+
+class BaseFileVersionsThrough(models.Model):
+    basefilenode = models.ForeignKey(BaseFileNode, db_index=True)
+    fileversion = models.ForeignKey(FileVersion, db_index=True)
+    version_name = models.TextField(blank=True)
+
+    class Meta:
+        unique_together = (('basefilenode', 'fileversion'),)
+        index_together = (
+            ('basefilenode', 'fileversion', )
+        )
diff --git a/osf/models/identifiers.py b/osf/models/identifiers.py
index e6149452353..5d871a74c0d 100644
--- a/osf/models/identifiers.py
+++ b/osf/models/identifiers.py
@@ -51,10 +51,10 @@ def request_identifier(self, category):
         if client:
             return client.create_identifier(self, category)
 
-    def request_identifier_update(self, category, status=None):
+    def request_identifier_update(self, category):
         client = self.get_doi_client()
         if client:
-            return client.update_identifier(self, category, status=status)
+            return client.update_identifier(self, category)
 
     def get_identifier(self, category):
         """Returns None of no identifier matches"""
diff --git a/osf/models/mixins.py b/osf/models/mixins.py
index 8fd6a9b30ac..3f06ba6da20 100644
--- a/osf/models/mixins.py
+++ b/osf/models/mixins.py
@@ -1656,10 +1656,21 @@ def confirm_spam(self, save=False):
         self.save()
 
     def _get_spam_content(self, saved_fields):
+        """
+        Retrieves strings of potential spam from various DB fields. Because this uses Django's
+        usual ORM query syntax, related fields can be traversed as well; for example, a node's
+        redirect link can be grabbed by passing a saved field of {'addons_forward_node_settings__url'}.
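+        Field values are pulled with a fresh values_list() query rather than getattr(),
+        which is what makes the related-field traversal possible.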
+
+        :param saved_fields: set
+        :return: str
+        """
         spam_fields = self.get_spam_fields(saved_fields)
         content = []
         for field in spam_fields:
-            content.append((getattr(self, field, None) or '').encode('utf-8'))
+            exclude_null = {field + '__isnull': False}
+            values = list(self.__class__.objects.filter(id=self.id, **exclude_null).values_list(field, flat=True))
+            if values:
+                content.append((' '.join(values) or '').encode('utf-8'))
         if self.all_tags.exists():
             content.extend([name.encode('utf-8') for name in self.all_tags.values_list('name', flat=True)])
         if not content:
diff --git a/osf/models/node.py b/osf/models/node.py
index 4752321487f..f5cb9735edd 100644
--- a/osf/models/node.py
+++ b/osf/models/node.py
@@ -282,6 +282,7 @@ class AbstractNode(DirtyFieldsMixin, TypedModel, AddonModelMixin, IdentifierMixi
     SPAM_CHECK_FIELDS = {
         'title',
         'description',
+        'addons_forward_node_settings__url'  # the often-spammed redirect URL
     }
 
     # Fields that are writable by Node.update
@@ -1314,8 +1315,7 @@ def set_privacy(self, permissions, auth=None, log=True, save=True, meeting_creat
 
         # Update existing identifiers
         if self.get_identifier('doi'):
-            doi_status = 'unavailable' if permissions == 'private' else 'public'
-            enqueue_task(update_doi_metadata_on_change.s(self._id, status=doi_status))
+            enqueue_task(update_doi_metadata_on_change.s(self._id))
 
         if log:
             action = NodeLog.MADE_PUBLIC if permissions == 'public' else NodeLog.MADE_PRIVATE
diff --git a/osf/models/user.py b/osf/models/user.py
index 956ea44fa9b..2d638c122c6 100644
--- a/osf/models/user.py
+++ b/osf/models/user.py
@@ -376,6 +376,10 @@ class OSFUser(DirtyFieldsMixin, GuidMixin, BaseModel, AbstractBaseUser, Permissi
     # whether the user has requested to deactivate their account
     requested_deactivation = models.BooleanField(default=False)
 
+    # whether the user who requested deactivation has been contacted about their pending request.
+    # This is reset when requests are canceled
+    contacted_deactivation = models.BooleanField(default=False)
+
     affiliated_institutions = models.ManyToManyField('Institution', blank=True)
 
     notifications_configured = DateTimeAwareJSONField(default=dict, blank=True)
@@ -1803,6 +1807,26 @@ def gdpr_delete(self):
         self.external_identity = {}
         self.deleted = timezone.now()
 
+    @property
+    def has_resources(self):
+        """
+        Determines whether a user has any resources (nodes, preprints, etc.) that might impede their deactivation.
+        If a user has no resources, or only deleted resources, this returns False and they can safely be
+        deactivated; otherwise they must first delete or transfer their outstanding resources.
+
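+        (A preprint counts here if it was ever public and is not deleted, per the ever_public
+        filter below, even if it is currently private.)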
+ """ + from osf.models import Preprint + + # TODO: Update once quickfolders in merged + + nodes = self.nodes.exclude(type='osf.quickfilesnode').exclude(is_deleted=True).exists() + quickfiles = self.nodes.get(type='osf.quickfilesnode').files.exists() + groups = self.osf_groups.exists() + preprints = Preprint.objects.filter(_contributors=self, ever_public=True, deleted__isnull=True).exists() + + return groups or nodes or quickfiles or preprints + class Meta: # custom permissions for use in the OSF Admin App permissions = ( diff --git a/osf_tests/factories.py b/osf_tests/factories.py index 61f27c21697..b8499447e31 100644 --- a/osf_tests/factories.py +++ b/osf_tests/factories.py @@ -21,7 +21,6 @@ from website.notifications.constants import NOTIFICATION_TYPES from osf.utils import permissions from website.archiver import ARCHIVER_SUCCESS -from website.identifiers.utils import parse_identifiers from website.settings import FAKE_EMAIL_NAME, FAKE_EMAIL_DOMAIN from framework.auth.core import Auth @@ -601,23 +600,9 @@ def _create(cls, target_class, *args, **kwargs): def sync_set_identifiers(preprint): - from website.identifiers.clients import EzidClient from website import settings - client = preprint.get_doi_client() - - if isinstance(client, EzidClient): - doi_value = settings.DOI_FORMAT.format(prefix=settings.EZID_DOI_NAMESPACE, guid=preprint._id) - ark_value = '{ark}osf.io/{guid}'.format(ark=settings.EZID_ARK_NAMESPACE, guid=preprint._id) - return_value = {'success': '{} | {}'.format(doi_value, ark_value)} - else: - return_value = {'doi': settings.DOI_FORMAT.format(prefix=preprint.provider.doi_prefix, guid=preprint._id)} - - doi_client_return_value = { - 'response': return_value, - 'already_exists': False - } - id_dict = parse_identifiers(doi_client_return_value) - preprint.set_identifier_values(doi=id_dict['doi']) + doi = settings.DOI_FORMAT.format(prefix=preprint.provider.doi_prefix, guid=preprint._id) + preprint.set_identifier_values(doi=doi) class PreprintFactory(DjangoModelFactory): diff --git a/package.json b/package.json index c5ea698507d..bc9777b8ec4 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "OSF", - "version": "19.24.0", + "version": "19.26.0", "description": "Facilitating Open Science", "repository": "https://github.com/CenterForOpenScience/osf.io", "author": "Center for Open Science", diff --git a/scripts/create_fakes.py b/scripts/create_fakes.py index f010da5a61d..9ec3cb6498b 100644 --- a/scripts/create_fakes.py +++ b/scripts/create_fakes.py @@ -311,8 +311,6 @@ def create_fake_project(creator, n_users, privacy, n_components, name, n_tags, p if not provider: provider = PreprintProviderFactory(name=fake.science_word()) privacy = 'public' - mock_change_identifier = mock.patch('website.identifiers.client.EzidClient.update_identifier') - mock_change_identifier.start() mock_change_identifier_preprints = mock.patch('website.identifiers.client.CrossRefClient.update_identifier') mock_change_identifier_preprints.start() project = PreprintFactory(title=project_title, description=fake.science_paragraph(), creator=creator, provider=provider) diff --git a/scripts/tests/test_deactivate_requested_accounts.py b/scripts/tests/test_deactivate_requested_accounts.py new file mode 100644 index 00000000000..300f9ea5570 --- /dev/null +++ b/scripts/tests/test_deactivate_requested_accounts.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- + +import pytest +import mock + +from nose.tools import * # noqa + +from osf_tests.factories import ProjectFactory, AuthUserFactory + +from 
diff --git a/tests/identifiers/test_datacite.py b/tests/identifiers/test_datacite.py
index 7da7cc49cb1..18a9dffbe1b 100644
--- a/tests/identifiers/test_datacite.py
+++ b/tests/identifiers/test_datacite.py
@@ -11,10 +11,7 @@
 
 from framework.auth import Auth
 from website import settings
-from website.app import init_addons
 from website.identifiers.clients import DataCiteClient
-from website.identifiers.clients.datacite import DataCiteMDSClient
-from website.identifiers import metadata
 from website.identifiers.utils import request_identifiers
 from tests.base import OsfTestCase
diff --git a/tests/identifiers/test_ezid.py b/tests/identifiers/test_ezid.py
deleted file mode 100644
index 8ef0f1f3bb8..00000000000
--- a/tests/identifiers/test_ezid.py
+++ /dev/null
@@ -1,134 +0,0 @@
-import furl
-import mock
-import pytest
-import responses
-from waffle.testutils import override_switch
-from nose.tools import *  # noqa
-
-from tests.base import OsfTestCase
-from tests.test_addons import assert_urls_equal
-from osf_tests.factories import AuthUserFactory, RegistrationFactory
-
-from website import settings
-from website.app import init_addons
-from website.identifiers.utils import to_anvl
-from website.identifiers.clients import EzidClient
-
-
-@pytest.mark.django_db
-class TestEZIDClient(OsfTestCase):
-
-    def setUp(self):
-        super(TestEZIDClient, self).setUp()
-        self.user = AuthUserFactory()
-        self.registration = RegistrationFactory(creator=self.user, is_public=True)
-        self.client = EzidClient(base_url='https://test.ezid.osf.io', prefix=settings.EZID_DOI_NAMESPACE.replace('doi:', ''))
-
-    @override_switch('ezid', active=True)
-    @responses.activate
-    def test_create_identifiers_not_exists_ezid(self):
-        guid = self.registration._id
-        url = furl.furl(self.client.base_url)
-        doi = settings.DOI_FORMAT.format(prefix=settings.EZID_DOI_NAMESPACE, guid=guid).replace('doi:', '')
-        url.path.segments += ['id', doi]
-        responses.add(
-            responses.Response(
-                responses.PUT,
-                url.url,
-                body=to_anvl({
-                    'success': '{doi}osf.io/{ident} | {ark}osf.io/{ident}'.format(
-                        doi=settings.EZID_DOI_NAMESPACE,
-                        ark=settings.EZID_ARK_NAMESPACE,
-                        ident=guid,
-                    ),
-                }),
-                status=201,
-            )
-        )
-        with mock.patch('osf.models.Registration.get_doi_client') as mock_get_doi:
-            mock_get_doi.return_value = self.client
-            res = self.app.post(
-                self.registration.api_url_for('node_identifiers_post'),
-                auth=self.user.auth,
-            )
-        self.registration.reload()
-        assert_equal(
-            res.json['doi'],
-            self.registration.get_identifier_value('doi')
-        )
-
-        assert_equal(res.status_code, 201)
-
-    @override_switch('ezid', active=True)
-    @responses.activate
-    def test_create_identifiers_exists_ezid(self):
-        guid = self.registration._id
-        doi = settings.DOI_FORMAT.format(prefix=settings.EZID_DOI_NAMESPACE, guid=guid).replace('doi:', '')
-        url = furl.furl(self.client.base_url)
-        url.path.segments += ['id', doi]
-        responses.add(
-            responses.Response(
-                responses.PUT,
-                url.url,
-                body='identifier already exists',
-                status=400,
-            )
-        )
-        responses.add(
-            responses.Response(
-                responses.GET,
-                url.url,
-                body=to_anvl({
-                    'success': doi,
-                }),
-                status=200,
-            )
-        )
-        with mock.patch('osf.models.Registration.get_doi_client') as mock_get_doi:
-            mock_get_doi.return_value = self.client
-            res = self.app.post(
-                self.registration.api_url_for('node_identifiers_post'),
-                auth=self.user.auth,
-            )
-        self.registration.reload()
-        assert_equal(
-            res.json['doi'],
-            self.registration.get_identifier_value('doi')
-        )
-        assert_equal(res.status_code, 201)
-
-    @responses.activate
-    def test_get_by_identifier(self):
-        self.registration.set_identifier_value('doi', 'FK424601')
-        self.registration.set_identifier_value('ark', 'fk224601')
-        res_doi = self.app.get(
-            self.registration.web_url_for(
-                'get_referent_by_identifier',
-                category='doi',
-                value=self.registration.get_identifier_value('doi'),
-            ),
-        )
-        assert_equal(res_doi.status_code, 302)
-        assert_urls_equal(res_doi.headers['Location'], self.registration.absolute_url)
-        res_ark = self.app.get(
-            self.registration.web_url_for(
-                'get_referent_by_identifier',
-                category='ark',
-                value=self.registration.get_identifier_value('ark'),
-            ),
-        )
-        assert_equal(res_ark.status_code, 302)
-        assert_urls_equal(res_ark.headers['Location'], self.registration.absolute_url)
-
-    @responses.activate
-    def test_get_by_identifier_not_found(self):
-        self.registration.set_identifier_value('doi', 'FK424601')
-        res = self.app.get(
-            self.registration.web_url_for(
-                'get_referent_by_identifier',
-                category='doi',
-                value='fakedoi',
-            ),
-            expect_errors=True,
-        )
-        assert_equal(res.status_code, 404)
diff --git a/tests/identifiers/test_identifiers.py b/tests/identifiers/test_identifiers.py
index 5b47f0e95f2..edf4339dc11 100644
--- a/tests/identifiers/test_identifiers.py
+++ b/tests/identifiers/test_identifiers.py
@@ -2,130 +2,15 @@
 from nose.tools import *  # noqa
 
 from django.db import IntegrityError
-from waffle.testutils import override_switch
 
 from osf_tests.factories import (
-    SubjectFactory,
-    AuthUserFactory,
-    PreprintFactory,
     IdentifierFactory,
     RegistrationFactory,
-    PreprintProviderFactory
 )
 
 from tests.base import OsfTestCase
-import lxml.etree
-
-from website import settings
-from website.identifiers import metadata
-from osf.models import Identifier, Subject, NodeLicense
-
-
-class TestMetadataGeneration(OsfTestCase):
-
-    def setUp(self):
-        OsfTestCase.setUp(self)
-        self.visible_contrib = AuthUserFactory()
-        visible_contrib2 = AuthUserFactory(given_name=u'ヽ༼ ಠ益ಠ ༽ノ', family_name=u'ლ(´◉❥◉`ლ)')
-        self.invisible_contrib = AuthUserFactory()
-        self.node = RegistrationFactory(is_public=True)
-        self.identifier = Identifier(referent=self.node, category='catid', value='cat:7')
-        self.node.add_contributor(self.visible_contrib, visible=True)
-        self.node.add_contributor(self.invisible_contrib, visible=False)
-        self.node.add_contributor(visible_contrib2, visible=True)
-        self.node.save()
-
-    # This test is not used as datacite is currently used for nodes, leaving here for future reference
-    def test_datacite_metadata_for_preprint_has_correct_structure(self):
-        provider = PreprintProviderFactory()
-        license = NodeLicense.objects.get(name='CC-By Attribution 4.0 International')
-        license_details = {
-            'id': license.license_id,
-            'year': '2017',
-            'copyrightHolders': ['Jeff Hardy', 'Matt Hardy']
-        }
-        preprint = PreprintFactory(provider=provider, project=self.node, is_published=True, license_details=license_details)
-        metadata_xml = metadata.datacite_metadata_for_preprint(preprint, doi=preprint.get_identifier('doi').value, pretty_print=True)
-
-        root = lxml.etree.fromstring(metadata_xml)
-        xsi_location = '{http://www.w3.org/2001/XMLSchema-instance}schemaLocation'
-        expected_location = 'http://datacite.org/schema/kernel-4 http://schema.datacite.org/meta/kernel-4/metadata.xsd'
-        assert root.attrib[xsi_location] == expected_location
-
-        identifier = root.find('{%s}identifier' % metadata.NAMESPACE)
-        assert identifier.attrib['identifierType'] == 'DOI'
-        assert identifier.text == preprint.get_identifier('doi').value
-
-        creators = root.find('{%s}creators' % metadata.NAMESPACE)
-        assert len(creators.getchildren()) == len(preprint.visible_contributors)
-
-        subjects = root.find('{%s}subjects' % metadata.NAMESPACE)
-        assert subjects.getchildren()
-
-        publisher = root.find('{%s}publisher' % metadata.NAMESPACE)
-        assert publisher.text == provider.name
-
-        pub_year = root.find('{%s}publicationYear' % metadata.NAMESPACE)
-        assert pub_year.text == str(preprint.date_published.year)
-
-        dates = root.find('{%s}dates' % metadata.NAMESPACE).getchildren()[0]
-        assert dates.text == preprint.modified.isoformat()
-        assert dates.attrib['dateType'] == 'Updated'
-
-        alternate_identifier = root.find('{%s}alternateIdentifiers' % metadata.NAMESPACE).getchildren()[0]
-        assert alternate_identifier.text == settings.DOMAIN + preprint._id
-        assert alternate_identifier.attrib['alternateIdentifierType'] == 'URL'
-
-        descriptions = root.find('{%s}descriptions' % metadata.NAMESPACE).getchildren()[0]
-        assert descriptions.text == preprint.description
-
-        rights = root.find('{%s}rightsList' % metadata.NAMESPACE).getchildren()[0]
-        assert rights.text == preprint.license.name
-
-    # This test is not used as datacite is currently used for nodes, leaving here for future reference
-    def test_datacite_format_creators_for_preprint(self):
-        preprint = PreprintFactory(project=self.node, is_published=True)
-
-        verified_user = AuthUserFactory(external_identity={'ORCID': {'1234-1234-1234-1234': 'VERIFIED'}})
-        linked_user = AuthUserFactory(external_identity={'ORCID': {'1234-nope-1234-nope': 'LINK'}})
-        preprint.add_contributor(verified_user, visible=True)
-        preprint.add_contributor(linked_user, visible=True)
-        preprint.save()
-
-        formatted_creators = metadata.format_creators(preprint)
-
-        contributors_with_orcids = 0
-        guid_identifiers = []
-        for creator_xml in formatted_creators:
-            assert creator_xml.find('creatorName').text != u'{}, {}'.format(self.invisible_contrib.family_name, self.invisible_contrib.given_name)
-
-            name_identifiers = creator_xml.findall('nameIdentifier')
-
-            for name_identifier in name_identifiers:
-                if name_identifier.attrib['nameIdentifierScheme'] == 'ORCID':
-                    assert name_identifier.attrib['schemeURI'] == 'http://orcid.org/'
-                    contributors_with_orcids += 1
-                else:
-                    guid_identifiers.append(name_identifier.text)
-                    assert name_identifier.attrib['nameIdentifierScheme'] == 'OSF'
-                    assert name_identifier.attrib['schemeURI'] == settings.DOMAIN
-
-        assert contributors_with_orcids >= 1
-        assert len(formatted_creators) == len(self.node.visible_contributors)
-        assert sorted(guid_identifiers) == sorted([contrib.absolute_url for contrib in preprint.visible_contributors])
-
-    # This test is not used as datacite is currently used for nodes, leaving here for future reference
-    def test_datacite_format_subjects_for_preprint(self):
-        subject = SubjectFactory()
-        subject_1 = SubjectFactory(parent=subject)
-        subject_2 = SubjectFactory(parent=subject)
-
-        subjects = [[subject._id, subject_1._id], [subject._id, subject_2._id]]
-        preprint = PreprintFactory(subjects=subjects, project=self.node, is_published=True)
-
-        formatted_subjects = metadata.format_subjects(preprint)
-        assert len(formatted_subjects) == Subject.objects.all().count()
+from osf.models import Identifier
 
 
 class TestIdentifierModel(OsfTestCase):
diff --git a/tests/test_addons.py b/tests/test_addons.py
index 47fe07a1bd8..df3d43c7c2f 100644
--- a/tests/test_addons.py
+++ b/tests/test_addons.py
@@ -171,7 +171,7 @@ def test_action_downloads_marks_version_as_seen(self):
 
         # Add a new version, make sure that does not have a record
         version = FileVersionFactory()
-        test_file.versions.add(version)
+        test_file.add_version(version)
         test_file.save()
 
         versions = test_file.versions.order_by('created')
@@ -738,7 +738,7 @@ def get_test_file(self):
             materialized_path='/test/Test',
         )
         ret.save()
-        ret.versions.add(version)
+        ret.add_version(version)
         return ret
 
     def get_second_test_file(self):
@@ -751,7 +751,7 @@ def get_second_test_file(self):
             materialized_path='/test/Test2',
         )
         ret.save()
-        ret.versions.add(version)
+        ret.add_version(version)
         return ret
 
     def get_uppercased_ext_test_file(self):
@@ -764,7 +764,7 @@ def get_uppercased_ext_test_file(self):
             materialized_path='/test/Test2',
         )
         ret.save()
-        ret.versions.add(version)
+        ret.add_version(version)
         return ret
 
     def get_ext_test_file(self):
@@ -777,7 +777,7 @@ def get_ext_test_file(self):
             materialized_path='/test/Test2',
         )
         ret.save()
-        ret.versions.add(version)
+        ret.add_version(version)
         return ret
 
     def get_mako_return(self):
diff --git a/tests/test_views.py b/tests/test_views.py
index bd861ec0431..544b95e99fd 100644
--- a/tests/test_views.py
+++ b/tests/test_views.py
@@ -1802,15 +1802,6 @@ def test_user_cannot_request_account_export_before_throttle_expires(self, send_m
         assert_equal(res.status_code, 400)
         assert_equal(send_mail.call_count, 1)
 
-    @mock.patch('framework.auth.views.mails.send_mail')
-    def test_user_cannot_request_account_deactivation_before_throttle_expires(self, send_mail):
-        url = api_url_for('request_deactivation')
-        self.app.post(url, auth=self.user.auth)
-        assert_true(send_mail.called)
-        res = self.app.post(url, auth=self.user.auth, expect_errors=True)
-        assert_equal(res.status_code, 400)
-        assert_equal(send_mail.call_count, 1)
-
     def test_get_unconfirmed_emails_exclude_external_identity(self):
         external_identity = {
             'service': {
diff --git a/tests/test_websitefiles.py b/tests/test_websitefiles.py
index e4e0c5231f4..ab51f807227 100644
--- a/tests/test_websitefiles.py
+++ b/tests/test_websitefiles.py
@@ -12,6 +12,7 @@
 from tests.base import OsfTestCase
 from osf_tests.factories import AuthUserFactory, ProjectFactory
 from website.files import exceptions
+from website.files.utils import attach_versions
 from osf import models
 
@@ -495,7 +496,7 @@ def test_get_version(self):
         )
         file.save()
 
-        file.versions.add(*[v1, v2])
+        attach_versions(file, [v1, v2])
 
         assert_equals(file.get_version('1'), v1)
         assert_equals(file.get_version('2', required=True), v2)
@@ -519,7 +520,7 @@ def test_update_version_metadata(self):
 
         file.save()
 
-        file.versions.add(v1)
+        file.add_version(v1)
 
         file.update_version_metadata(None, {'size': 1337})
 
         with assert_raises(exceptions.VersionNotFoundError):
diff --git a/website/files/utils.py b/website/files/utils.py
index 022fcfc8706..cff82293458 100644
--- a/website/files/utils.py
+++ b/website/files/utils.py
@@ -6,6 +6,7 @@ def copy_files(src, target_node, parent=None, name=None):
     :param Folder parent: The folder to attach the clone of src to, if applicable
     """
     assert not parent or not parent.is_file, 'Parent must be a folder'
+    renaming = src.name != name
 
     cloned = src.clone()
     cloned.parent = parent
@@ -14,20 +15,26 @@ def copy_files(src, target_node, parent=None, name=None):
     cloned.copied_from = src
     cloned.save()
 
     if src.is_file and src.versions.exists():
         fileversions = src.versions.select_related('region').order_by('-created')
         most_recent_fileversion = fileversions.first()
         if most_recent_fileversion.region and most_recent_fileversion.region != target_node.osfstorage_region:
             # add all original versions except the most recent
-            cloned.versions.add(*fileversions[1:])
+            attach_versions(cloned, fileversions[1:], src)
             # create a new most recent version and update the region before adding
             new_fileversion = most_recent_fileversion.clone()
             new_fileversion.region = target_node.osfstorage_region
             new_fileversion.save()
-            cloned.versions.add(new_fileversion)
+            attach_versions(cloned, [new_fileversion], src)
         else:
-            cloned.versions.add(*src.versions.all())
+            attach_versions(cloned, src.versions.all(), src)
+
+    if renaming:
+        latest_version = cloned.versions.first()
+        node_file_version = latest_version.get_basefilenode_version(cloned)
+        # If this is a copy and a rename, update the name on the through table
+        node_file_version.version_name = cloned.name
+        node_file_version.save()
 
     # copy over file metadata records
     if cloned.provider == 'osfstorage':
@@ -40,3 +47,13 @@ def copy_files(src, target_node, parent=None, name=None):
         copy_files(child, target_node, parent=cloned)
 
     return cloned
+
+def attach_versions(file, versions_list, src=None):
+    """
+    Attach each version in versions_list to the file, preserving the name each
+    version had on the original src when copying.
+    """
+    for version in versions_list:
+        original_version = version.get_basefilenode_version(src)
+        name = original_version.version_name if original_version else None
+        file.add_version(version, name)
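
How the rename path plays out, as a sketch; file_node, other_node, and other_root are hypothetical stand-ins for an osfstorage file, a destination node, and its root folder (copy_files itself reads the newest version with versions.first(), so the same ordering is assumed here):

    # Hypothetical usage -- stand-in names, not fixtures from this patch.
    from website.files.utils import copy_files

    cloned = copy_files(file_node, other_node, parent=other_root, name='final.txt')

    # Older versions keep the name they had on the source (assuming the source
    # file itself was never renamed)...
    oldest = cloned.versions.last()
    assert oldest.get_basefilenode_version(cloned).version_name == file_node.name

    # ...while the newest version on the clone records the new name.
    newest = cloned.versions.first()
    assert newest.get_basefilenode_version(cloned).version_name == 'final.txt'
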
diff --git a/website/identifiers/clients/__init__.py b/website/identifiers/clients/__init__.py
index 60f1d0ec489..6dd707891fc 100644
--- a/website/identifiers/clients/__init__.py
+++ b/website/identifiers/clients/__init__.py
@@ -1,3 +1,2 @@
 from .crossref import CrossRefClient, ECSArXivCrossRefClient  # noqa
 from .datacite import DataCiteClient  # noqa
-from .ezid import EzidClient  # noqa
diff --git a/website/identifiers/clients/crossref.py b/website/identifiers/clients/crossref.py
index a1d65e98496..da8bc00bb59 100644
--- a/website/identifiers/clients/crossref.py
+++ b/website/identifiers/clients/crossref.py
@@ -8,7 +8,7 @@
 
 from django.db.models import QuerySet
 from framework.auth.utils import impute_names
-from website.identifiers.metadata import remove_control_characters
+from website.identifiers.utils import remove_control_characters
 from website.identifiers.clients.base import AbstractIdentifierClient
 from website import settings
 
@@ -214,9 +214,9 @@ def _build_url(self, **query):
         url.args.update(query)
         return url.url
 
-    def create_identifier(self, preprint, category, status=None, include_relation=True):
-        if status is None:
-            status = self.get_status(preprint)
+    def create_identifier(self, preprint, category, include_relation=True):
+        status = self.get_status(preprint)
+
         if category == 'doi':
             metadata = self.build_metadata(preprint, status, include_relation)
             doi = self.build_doi(preprint)
@@ -241,8 +241,8 @@ def create_identifier(self, preprint, category, status=None, include_relation=True):
         else:
             raise NotImplementedError()
 
-    def update_identifier(self, preprint, category, status=None):
-        return self.create_identifier(preprint, category, status)
+    def update_identifier(self, preprint, category):
+        return self.create_identifier(preprint, category)
 
     def get_status(self, preprint):
         return 'public' if preprint.verified_publishable and not preprint.is_retracted else 'unavailable'
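
With status dropped from both signatures, callers can no longer force a publication state; the client always derives it. A minimal sketch, assuming an existing published preprint:

    # Sketch of the narrowed client API; 'preprint' is an assumption, not a fixture.
    from osf.models import Preprint

    preprint = Preprint.objects.filter(is_published=True).first()
    client = preprint.get_doi_client()         # a CrossRefClient for preprints
    print(client.get_status(preprint))         # 'public' or 'unavailable', derived from the preprint
    client.update_identifier(preprint, 'doi')  # no status kwarg anymore; delegates to create_identifier
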
diff --git a/website/identifiers/clients/datacite.py b/website/identifiers/clients/datacite.py
index 09376c10fb2..1d2cb917bef 100644
--- a/website/identifiers/clients/datacite.py
+++ b/website/identifiers/clients/datacite.py
@@ -85,7 +85,7 @@ def create_identifier(self, node, category):
         else:
             raise NotImplementedError('Creating an identifier with category {} is not supported'.format(category))
 
-    def update_identifier(self, node, category, status=None):
+    def update_identifier(self, node, category):
         if not node.is_public or node.is_deleted:
             if category == 'doi':
                 doi = self.build_doi(node)
diff --git a/website/identifiers/clients/ezid.py b/website/identifiers/clients/ezid.py
deleted file mode 100644
index fe9c38655a4..00000000000
--- a/website/identifiers/clients/ezid.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# -*- coding: utf-8 -*-
-import logging
-import furl
-import requests
-
-from website import settings
-from website.identifiers import utils
-from website.util.client import BaseClient
-from website.identifiers.clients import DataCiteClient, exceptions
-
-logger = logging.getLogger(__name__)
-
-
-class EzidClient(BaseClient, DataCiteClient):
-    """Inherits _make_request from BaseClient"""
-
-    def _build_url(self, *segments, **query):
-        url = furl.furl(self.base_url)
-        url.path.segments.extend(segments)
-        url.args.update(query)
-        return url.url
-
-    @property
-    def _default_headers(self):
-        return {'Content-Type': 'text/plain; charset=UTF-8'}
-
-    def build_doi(self, object):
-        return settings.DOI_FORMAT.format(prefix=self.prefix, guid=object._id)
-
-    def get_identifier(self, identifier):
-        resp = self._make_request(
-            'GET',
-            self._build_url('id', identifier),
-            expects=(200, ),
-        )
-        return utils.from_anvl(resp.content.strip('\n'))
-
-    def create_identifier(self, object, category):
-        if category in ['doi', 'ark']:
-            metadata = self.build_metadata(object)
-            doi = self.build_doi(object)
-            resp = requests.request(
-                'PUT',
-                self._build_url('id', doi),
-                data=utils.to_anvl(metadata or {}),
-            )
-            if resp.status_code != 201:
-                if 'identifier already exists' in resp.content:
-                    raise exceptions.IdentifierAlreadyExists()
-                else:
-                    raise exceptions.ClientResponseError(resp)
-            resp = utils.from_anvl(resp.content)
-            return dict(
-                [each.strip('/') for each in pair.strip().split(':')]
-                for pair in resp['success'].split('|')
-            )
-        else:
-            raise NotImplementedError('Create identifier method is not supported for category {}'.format(category))
-
-    def update_identifier(self, object, category):
-        metadata = self.build_metadata(object)
-        status = self.get_status(object)
-        metadata['_status'] = status
-        identifier = self.build_doi(object)
-        resp = self._make_request(
-            'POST',
-            self._build_url('id', identifier),
-            data=utils.to_anvl(metadata or {}),
-            expects=(200, ),
-        )
-        return utils.from_anvl(resp.content)
-
-    def get_status(self, object):
-        from osf.models import Preprint
-
-        if isinstance(object, Preprint):
-            status = 'public' if object.verified_publishable else 'unavailable'
-        else:
-            status = 'public' if object.is_public or not object.is_deleted else 'unavailable'
-        return status
diff --git a/website/identifiers/listeners.py b/website/identifiers/listeners.py
index 6d3307d7dde..b0acb6efb06 100644
--- a/website/identifiers/listeners.py
+++ b/website/identifiers/listeners.py
@@ -7,4 +7,4 @@ def update_status_on_delete(node):
     from website.identifiers.tasks import update_doi_metadata_on_change
 
     if node.get_identifier('doi'):
-        enqueue_task(update_doi_metadata_on_change.s(node._id, status='unavailable'))
+        enqueue_task(update_doi_metadata_on_change.s(node._id))
diff --git a/website/identifiers/metadata.py b/website/identifiers/metadata.py
deleted file mode 100644
index 2bfa4623321..00000000000
--- a/website/identifiers/metadata.py
+++ /dev/null
@@ -1,127 +0,0 @@
-# -*- coding: utf-8 -*-
-import unicodedata
-import lxml.etree
-import lxml.builder
-
-from website import settings
-
-NAMESPACE = 'http://datacite.org/schema/kernel-4'
-XSI = 'http://www.w3.org/2001/XMLSchema-instance'
-SCHEMA_LOCATION = 'http://datacite.org/schema/kernel-4 http://schema.datacite.org/meta/kernel-4/metadata.xsd'
-E = lxml.builder.ElementMaker(nsmap={
-    None: NAMESPACE,
-    'xsi': XSI},
-)
-
-CREATOR = E.creator
-CREATOR_NAME = E.creatorName
-SUBJECT_SCHEME = 'bepress Digital Commons Three-Tiered Taxonomy'
-
-# From https://stackoverflow.com/a/19016117
-# lxml does not accept strings with control characters
-def remove_control_characters(s):
-    return ''.join(ch for ch in s if unicodedata.category(ch)[0] != 'C')
-
-# This function is not OSF-specific
-def datacite_metadata(doi, title, creators, publisher, publication_year, pretty_print=False):
-    """Return the formatted datacite metadata XML as a string.
-
-    :param str doi
-    :param str title
-    :param list creators: List of creator names, formatted like 'Shakespeare, William'
-    :param str publisher: Publisher name.
-    :param int publication_year
-    :param bool pretty_print
-    """
-    creators = [CREATOR(CREATOR_NAME(each)) for each in creators]
-    root = E.resource(
-        E.resourceType('Project', resourceTypeGeneral='Text'),
-        E.identifier(doi, identifierType='DOI'),
-        E.creators(*creators),
-        E.titles(E.title(remove_control_characters(title))),
-        E.publisher(publisher),
-        E.publicationYear(str(publication_year)),
-    )
-    # set xsi:schemaLocation
-    root.attrib['{%s}schemaLocation' % XSI] = SCHEMA_LOCATION
-    return lxml.etree.tostring(root, pretty_print=pretty_print)
-
-
-def format_contributor(contributor):
-    return remove_control_characters(u'{}, {}'.format(contributor.family_name, contributor.given_name))
-
-
-# This function is OSF specific.
-def datacite_metadata_for_node(node, doi, pretty_print=False):
-    """Return the datacite metadata XML document for a given node as a string.
-
-    :param Node node
-    :param str doi
-    """
-    creators = [format_contributor(each) for each in node.visible_contributors]
-    return datacite_metadata(
-        doi=doi,
-        title=node.title,
-        creators=creators,
-        publisher='Open Science Framework',
-        publication_year=getattr(node.registered_date or node.created, 'year'),
-        pretty_print=pretty_print
-    )
-
-
-def format_creators(preprint):
-    creators = []
-    for contributor in preprint.visible_contributors:
-        creator = CREATOR(E.creatorName(format_contributor(contributor)))
-        creator.append(E.givenName(remove_control_characters(contributor.given_name)))
-        creator.append(E.familyName(remove_control_characters(contributor.family_name)))
-        creator.append(E.nameIdentifier(contributor.absolute_url, nameIdentifierScheme='OSF', schemeURI=settings.DOMAIN))
-
-        # contributor.external_identity = {'ORCID': {'1234-1234-1234-1234': 'VERIFIED'}}
-        if contributor.external_identity.get('ORCID'):
-            verified = contributor.external_identity['ORCID'].values()[0] == 'VERIFIED'
-            if verified:
-                creator.append(E.nameIdentifier(contributor.external_identity['ORCID'].keys()[0], nameIdentifierScheme='ORCID', schemeURI='http://orcid.org/'))
-
-        creators.append(creator)
-
-    return creators
-
-
-def format_subjects(preprint):
-    return [E.subject(subject, subjectScheme=SUBJECT_SCHEME) for subject in preprint.subjects.values_list('text', flat=True)]
-
-
-# This function is OSF specific.
-def datacite_metadata_for_preprint(preprint, doi, pretty_print=False):
-    """Return the datacite metadata XML document for a given preprint as a string.
-
-    :param preprint -- the preprint
-    :param str doi
-    """
-    # NOTE: If you change *ANYTHING* here be 100% certain that the
-    # changes you make are also made to the SHARE serialization code.
-    # If the data sent out is not EXCATLY the same all the data will get jumbled up in SHARE.
-    # And then search results will be wrong and broken. And it will be your fault. And you'll have caused many sleepless nights.
-    # Don't be that person.
-    root = E.resource(
-        E.resourceType('Preprint', resourceTypeGeneral='Text'),
-        E.identifier(doi, identifierType='DOI'),
-        E.subjects(*format_subjects(preprint)),
-        E.creators(*format_creators(preprint)),
-        E.titles(E.title(remove_control_characters(preprint.title))),
-        E.publisher(preprint.provider.name),
-        E.publicationYear(str(getattr(preprint.date_published, 'year'))),
-        E.dates(E.date(preprint.modified.isoformat(), dateType='Updated')),
-        E.alternateIdentifiers(E.alternateIdentifier(settings.DOMAIN + preprint._id, alternateIdentifierType='URL')),
-        E.descriptions(E.description(remove_control_characters(preprint.description), descriptionType='Abstract')),
-    )
-
-    if preprint.license:
-        root.append(E.rightsList(E.rights(preprint.license.name)))
-
-    if preprint.article_doi:
-        root.append(E.relatedIdentifiers(E.relatedIdentifier(settings.DOI_URL_PREFIX + preprint.article_doi, relatedIdentifierType='URL', relationType='IsPreviousVersionOf'))),
-    # set xsi:schemaLocation
-    root.attrib['{%s}schemaLocation' % XSI] = SCHEMA_LOCATION
-    return lxml.etree.tostring(root, pretty_print=pretty_print)
diff --git a/website/identifiers/tasks.py b/website/identifiers/tasks.py
index 1c17ed199bf..b6f5cade2f4 100644
--- a/website/identifiers/tasks.py
+++ b/website/identifiers/tasks.py
@@ -4,8 +4,8 @@
 
 
 @celery_app.task(ignore_results=True)
-def update_doi_metadata_on_change(target_guid, status):
+def update_doi_metadata_on_change(target_guid):
     Guid = apps.get_model('osf.Guid')
     target_object = Guid.load(target_guid).referent
     if target_object.get_identifier('doi'):
-        target_object.request_identifier_update(category='doi', status=status)
+        target_object.request_identifier_update(category='doi')
diff --git a/website/identifiers/utils.py b/website/identifiers/utils.py
index 458c667dc97..987e3a99cbb 100644
--- a/website/identifiers/utils.py
+++ b/website/identifiers/utils.py
@@ -2,9 +2,9 @@
 
 import re
 import logging
+import unicodedata
 
 from framework.exceptions import HTTPError
-from website import settings
 
 logger = logging.getLogger(__name__)
 
@@ -28,42 +28,11 @@ def unescape(value):
     return re.sub(r'%[0-9A-Fa-f]{2}', decode, value)
 
 
-def to_anvl(data):
-    if isinstance(data, dict):
-        return FIELD_SEPARATOR.join(
-            PAIR_SEPARATOR.join([escape(key), escape(to_anvl(value))])
-            for key, value in data.items()
-        )
-    return data
-
-
-def _field_from_anvl(raw):
-    key, value = raw.split(PAIR_SEPARATOR)
-    return [unescape(key), from_anvl(unescape(value))]
-
-
-def from_anvl(data):
-    if PAIR_SEPARATOR in data:
-        return dict([
-            _field_from_anvl(pair)
-            for pair in data.split(FIELD_SEPARATOR)
-        ])
-    return data
-
-
-def merge_dicts(*dicts):
-    return dict(sum((each.items() for each in dicts), []))
-
-
 def request_identifiers(target_object):
     """Request identifiers for the target object using the appropriate client.
 
     :param target_object: object to request identifiers for
-    :return: dict with keys relating to the status of the identifier
-             response - response from the DOI client
-             already_exists - the DOI has already been registered with a client
-             only_doi - boolean; only include the DOI (and not the ARK) identifier
-             when processing this response in get_or_create_identifiers
+    :return: dict containing the DOI
     """
     from website.identifiers.clients import exceptions
 
@@ -74,43 +43,17 @@ def request_identifiers(target_object):
     client = target_object.get_doi_client()
     if not client:
         return
     doi = client.build_doi(target_object)
-    already_exists = False
-    only_doi = True
     try:
         identifiers = target_object.request_identifier(category='doi')
-    except exceptions.IdentifierAlreadyExists as error:
+    except exceptions.IdentifierAlreadyExists:
        identifiers = client.get_identifier(doi)
-        already_exists = True
-        only_doi = False
     except exceptions.ClientResponseError as error:
         raise HTTPError(error.response.status_code)
     return {
-        'doi': identifiers.get('doi'),
-        'already_exists': already_exists,
-        'only_doi': only_doi
+        'doi': identifiers.get('doi')
     }
 
 
-def parse_identifiers(doi_client_response):
-    """
-    Note: ARKs include a leading slash. This is stripped here to avoid multiple
-    consecutive slashes in internal URLs (e.g. /ids/ark//). Frontend code
-    that build ARK URLs is responsible for adding the leading slash.
-    Moved from website/project/views/register.py for use by other modules
-    """
-    resp = doi_client_response['response']
-    exists = doi_client_response.get('already_exists', None)
-    if exists:
-        doi = resp['success']
-        suffix = doi.strip(settings.EZID_DOI_NAMESPACE)
-        return {
-            'doi': doi.replace('doi:', ''),
-            'ark': '{0}{1}'.format(settings.EZID_ARK_NAMESPACE.replace('ark:', ''), suffix),
-        }
-    else:
-        return {'doi': resp['doi']}
-
-
 def get_or_create_identifiers(target_object):
     """
     Note: ARKs include a leading slash. This is stripped here to avoid multiple
@@ -118,14 +61,18 @@ def get_or_create_identifiers(target_object):
     consecutive slashes in internal URLs (e.g. /ids/ark//). Frontend code
     that builds ARK URLs is responsible for adding the leading slash.
     Moved from website/project/views/register.py for use by other modules
     """
-    response_dict = request_identifiers(target_object)
-    ark = target_object.get_identifier(category='ark')
-    doi = response_dict['doi']
+    doi = request_identifiers(target_object)['doi']
     if not doi:
         client = target_object.get_doi_client()
         doi = client.build_doi(target_object)
-    response = {'doi': doi}
+
+    ark = target_object.get_identifier(category='ark')
     if ark:
-        response['ark'] = ark.value
-
-    return response
+        return {'doi': doi, 'ark': ark.value}
+
+    return {'doi': doi}
+
+
+# From https://stackoverflow.com/a/19016117
+# lxml does not accept strings with control characters
+def remove_control_characters(s):
+    return ''.join(ch for ch in s if unicodedata.category(ch)[0] != 'C')
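
remove_control_characters moves here from the deleted website/identifiers/metadata.py because the CrossRef client still needs it: lxml refuses to serialize strings containing control characters. Its behavior, for reference:

    # Unicode code points in category 'C*' (control, format, ...) are dropped.
    from website.identifiers.utils import remove_control_characters

    assert remove_control_characters(u'My\x07 Preprint\u200b') == u'My Preprint'
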
diff --git a/website/mails/mails.py b/website/mails/mails.py
index d51ca93a282..dd2ac56c809 100644
--- a/website/mails/mails.py
+++ b/website/mails/mails.py
@@ -251,6 +251,8 @@ def get_english_article(word):
 REQUEST_EXPORT = Mail('support_request', subject='[via OSF] Export Request')
 REQUEST_DEACTIVATION = Mail('support_request', subject='[via OSF] Deactivation Request')
 
+REQUEST_DEACTIVATION_COMPLETE = Mail('request_deactivation_complete', subject='[via OSF] OSF account deactivated')
+
 SPAM_USER_BANNED = Mail('spam_user_banned', subject='[OSF] Account flagged as spam')
 
 CONFERENCE_SUBMITTED = Mail(
diff --git a/website/preprints/tasks.py b/website/preprints/tasks.py
index 0692796b027..a7257a7a88e 100644
--- a/website/preprints/tasks.py
+++ b/website/preprints/tasks.py
@@ -48,9 +48,8 @@ def should_update_preprint_identifiers(preprint, old_subjects, saved_fields):
     )
 
 def update_or_create_preprint_identifiers(preprint):
-    status = 'public' if preprint.verified_publishable and not preprint.is_retracted else 'unavailable'
     try:
-        preprint.request_identifier_update(category='doi', status=status)
+        preprint.request_identifier_update(category='doi')
     except HTTPError as err:
         sentry.log_exception()
         sentry.log_message(err.args[0])
diff --git a/website/profile/views.py b/website/profile/views.py
index 10555ca9b1c..7b4e4eab7fc 100644
--- a/website/profile/views.py
+++ b/website/profile/views.py
@@ -816,20 +816,6 @@ def request_export(auth):
 @must_be_logged_in
 def request_deactivation(auth):
     user = auth.user
-    if not throttle_period_expired(user.email_last_sent, settings.SEND_EMAIL_THROTTLE):
-        raise HTTPError(http_status.HTTP_400_BAD_REQUEST,
-                        data={
-                            'message_long': 'Too many requests. Please wait a while before sending another account deactivation request.',
-                            'error_type': 'throttle_error'
-                        })
-
-    mails.send_mail(
-        to_addr=settings.OSF_SUPPORT_EMAIL,
-        mail=mails.REQUEST_DEACTIVATION,
-        user=auth.user,
-        can_change_preferences=False,
-    )
-    user.email_last_sent = timezone.now()
     user.requested_deactivation = True
     user.save()
     return {'message': 'Sent account deactivation request'}
@@ -838,5 +824,6 @@ def cancel_request_deactivation(auth):
     user = auth.user
     user.requested_deactivation = False
+    user.contacted_deactivation = False  # In case we've already contacted them once.
     user.save()
     return {'message': 'You have canceled your deactivation request'}
diff --git a/website/settings/defaults.py b/website/settings/defaults.py
index c9846449e90..9bfe700d72e 100644
--- a/website/settings/defaults.py
+++ b/website/settings/defaults.py
@@ -318,10 +318,6 @@ def parent_dir(path):
 # General Format for DOIs
 DOI_FORMAT = '{prefix}/osf.io/{guid}'
 
-# ezid
-EZID_DOI_NAMESPACE = 'doi:10.5072'
-EZID_ARK_NAMESPACE = 'ark:99999'
-
 # datacite
 DATACITE_USERNAME = None
 DATACITE_PASSWORD = None
@@ -489,6 +485,7 @@ class CeleryConfig:
         'scripts.generate_sitemap',
         'scripts.premigrate_created_modified',
         'scripts.add_missing_identifiers_to_preprints',
+        'osf.management.commands.deactivate_requested_accounts',
     )
 
     # Modules that need metrics and release requirements
@@ -608,6 +605,10 @@ class CeleryConfig:
             'task': 'scripts.generate_sitemap',
             'schedule': crontab(minute=0, hour=5),  # Daily 12:00 a.m.
         },
+        'deactivate_requested_accounts': {
+            'task': 'management.commands.deactivate_requested_accounts',
+            'schedule': crontab(minute=0, hour=5),  # Daily 12:00 a.m.
+        },
         'check_crossref_doi': {
             'task': 'management.commands.check_crossref_dois',
             'schedule': crontab(minute=0, hour=4),  # Daily 11:00 p.m.
diff --git a/website/static/js/fangorn.js b/website/static/js/fangorn.js
index ec1ad7c229e..a4db681ab58 100644
--- a/website/static/js/fangorn.js
+++ b/website/static/js/fangorn.js
@@ -1699,7 +1699,7 @@ function _loadTopLevelChildren() {
  * @this Treebeard.controller
  * @private
  */
-var NO_AUTO_EXPAND_PROJECTS = ['ezcuj', 'ecmz4', 'w4wvg', 'sn64d', 'pfdyw'];
+var NO_AUTO_EXPAND_PROJECTS = ['ezcuj', 'ecmz4', 'w4wvg', 'sn64d', 'pfdyw', '4jbx4'];
 function expandStateLoad(item) {
     var tb = this,
         icon = $('.tb-row[data-id="' + item.id + '"]').find('.tb-toggle-icon'),
diff --git a/website/static/js/filepage/revisions.js b/website/static/js/filepage/revisions.js
index 0e99d81f625..c938bb68bc0 100644
--- a/website/static/js/filepage/revisions.js
+++ b/website/static/js/filepage/revisions.js
@@ -218,7 +218,12 @@ var FileRevisionsTable = {
         }
 
         if (file.provider === 'osfstorage' && file.name && index !== 0) {
-            var parts = file.name.split('.');
+            var parts;
+            if (file.versionNames && file.versionNames.length) {
+                parts = file.versionNames[index].split('.');
+            } else {
+                parts = file.name.split('.');
+            }
             if (parts.length === 1) {
                 options.displayName = parts[0] + '-' + revision.modified;
             } else {
diff --git a/website/templates/emails/request_deactivation_complete.html.mako b/website/templates/emails/request_deactivation_complete.html.mako
new file mode 100644
index 00000000000..6ef726186e3
--- /dev/null
+++ b/website/templates/emails/request_deactivation_complete.html.mako
@@ -0,0 +1,19 @@
+<%inherit file="notify_base.mako" />
+
+<%def name="content()">
+    <tr>
+      <td style="border-collapse: collapse;">
+        Hi ${user.given_name},
+        <br>
+        <br>
+        Your OSF account has been deactivated. You will not show up in search, nor will a profile be visible for you.
+        If you try to log in, you will receive an error message that your account has been disabled. If, in the future,
+        you would like to create an account with this email address, you can do so by emailing us at ${contact_email}.
+        <br>
+        <br>
+        Sincerely,
+        The OSF Team
+      </td>
+    </tr>
+
+</%def>
\ No newline at end of file
diff --git a/website/templates/project/view_file.mako b/website/templates/project/view_file.mako
index b2876bce2ce..98b9804d648 100644
--- a/website/templates/project/view_file.mako
+++ b/website/templates/project/view_file.mako
@@ -197,6 +197,7 @@
             id: ${file_id | sjson, n},
             checkoutUser: ${checkout_user if checkout_user else None | sjson, n},
             isPreregCheckout: ${pre_reg_checkout if pre_reg_checkout else False | sjson, n},
+            versionNames: ${version_names if version_names else [] | sjson, n},
             urls: {
 %if error is None:
                 render: ${ urls['render'] | sjson, n },