diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 68c6d25390a..28574f498dc 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,3 +1,5 @@
+default_language_version:
+ python: python3.6
repos:
- repo: https://github.com/asottile/add-trailing-comma
rev: v0.7.0
diff --git a/CHANGELOG b/CHANGELOG
index 307a19276c7..f927b3ecf03 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -2,6 +2,20 @@
We follow the CalVer (https://calver.org/) versioning scheme: YY.MINOR.MICRO.
+22.06.0 (2022-06-23)
+====================
+- Fix support for Dataverse files
+- Match Legacy behavior with new `show_as_unviewed` File field
+- Other assorted fixes for new Files page
+
+22.05.0 (2022-06-09)
+====================
+- Add institutional affiliations via ROR to minted DOIs
+- Improve file sorting by `date_modified`
+- Update help links
+- Add ability to create RegistrationSchemas via the Admin App
+- Update files page routing for upcoming FE release
+
22.04.0 (2022-03-31)
====================
- Update JS and Python dependencies
diff --git a/addons/base/exceptions.py b/addons/base/exceptions.py
index 551a9ffc7ca..cc3a70cdf3a 100644
--- a/addons/base/exceptions.py
+++ b/addons/base/exceptions.py
@@ -13,3 +13,9 @@ class InvalidAuthError(AddonError):
class HookError(AddonError):
pass
+
+class QueryError(AddonError):
+ pass
+
+class DoesNotExist(AddonError):
+ pass
diff --git a/addons/base/views.py b/addons/base/views.py
index 84f85af0b55..67b4bb30e28 100644
--- a/addons/base/views.py
+++ b/addons/base/views.py
@@ -19,6 +19,7 @@
from api.caching.tasks import update_storage_usage_with_size
+from addons.base import exceptions as addon_errors
from addons.base.models import BaseStorageAddon
from addons.osfstorage.models import OsfStorageFileNode
from addons.osfstorage.utils import update_analytics
@@ -55,7 +56,6 @@
)
from osf.metrics import PreprintView, PreprintDownload
from osf.utils import permissions
-from website.ember_osf_web.views import use_ember_app
from website.profile.utils import get_profile_image_url
from website.project import decorators
from website.project.decorators import must_be_contributor_or_public, must_be_valid_project, check_contributor_auth
@@ -726,21 +726,32 @@ def addon_view_or_download_file(auth, path, provider, **kwargs):
})
savepoint_id = transaction.savepoint()
- file_node = BaseFileNode.resolve_class(provider, BaseFileNode.FILE).get_or_create(target, path)
- if isinstance(target, Node) and waffle.flag_is_active(request, features.EMBER_FILE_PROJECT_DETAIL):
- return use_ember_app()
- if action != 'download' and isinstance(target, Registration) and waffle.flag_is_active(request, features.EMBER_FILE_REGISTRATION_DETAIL):
- if file_node.get_guid():
- guid = file_node.get_guid()
- else:
- guid = file_node.get_guid(create=True)
- guid.save()
- file_node.save()
- return redirect(f'{settings.DOMAIN}{guid._id}/')
+ try:
+ file_node = BaseFileNode.resolve_class(
+ provider, BaseFileNode.FILE
+ ).get_or_create(
+ target, path, **extras
+ )
+ except addon_errors.QueryError as e:
+ raise HTTPError(
+ http_status.HTTP_400_BAD_REQUEST,
+ data={
+ 'message_short': 'Bad Request',
+ 'message_long': str(e)
+ }
+ )
+ except addon_errors.DoesNotExist as e:
+ raise HTTPError(
+ http_status.HTTP_404_NOT_FOUND,
+ data={
+ 'message_short': 'Not Found',
+ 'message_long': str(e)
+ }
+ )
# Note: Cookie is provided for authentication to waterbutler
- # it is overriden to force authentication as the current user
+ # it is overridden to force authentication as the current user
# the auth header is also pass to support basic auth
version = file_node.touch(
request.headers.get('Authorization'),
@@ -749,6 +760,16 @@ def addon_view_or_download_file(auth, path, provider, **kwargs):
cookie=request.cookies.get(settings.COOKIE_NAME)
)
)
+
+ # If there's no download action, redirect to the Ember front-end file view and create a guid.
+ if action != 'download':
+ if isinstance(target, Node) and waffle.flag_is_active(request, features.EMBER_FILE_PROJECT_DETAIL):
+ guid = file_node.get_guid(create=True)
+ return redirect(f'{settings.DOMAIN}{guid._id}/')
+ if isinstance(target, Registration) and waffle.flag_is_active(request, features.EMBER_FILE_REGISTRATION_DETAIL):
+ guid = file_node.get_guid(create=True)
+ return redirect(f'{settings.DOMAIN}{guid._id}/')
+
if version is None:
# File is either deleted or unable to be found in the provider location
# Rollback the insertion of the file_node
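Reviewer note (not part of the diff): a rough sketch of the client-visible behavior this hunk introduces, using a hypothetical public project id, Dataverse path, and guid, and assuming the EMBER_FILE_PROJECT_DETAIL flag is active:

    import requests

    url = 'https://osf.io/abc12/files/dataverse/datafile.csv/'

    # Dataverse's get_or_create now demands a "version" query param;
    # the resulting QueryError surfaces as HTTP 400.
    r = requests.get(url, allow_redirects=False)
    assert r.status_code == 400

    # Non-download views now 302-redirect to the file's guid page
    # instead of streaming the Ember app from this route.
    r = requests.get(url, params={'version': 'latest'}, allow_redirects=False)
    assert r.status_code == 302
    assert r.headers['Location'].startswith('https://osf.io/')  # e.g. https://osf.io/<guid>/

    # action=download requests are unchanged and still proxy through waterbutler.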
diff --git a/addons/bitbucket/api.py b/addons/bitbucket/api.py
index 92d3bbd690e..e7176b6395b 100644
--- a/addons/bitbucket/api.py
+++ b/addons/bitbucket/api.py
@@ -3,14 +3,14 @@
from addons.bitbucket import settings
from framework.exceptions import HTTPError
-
+from osf.utils.fields import ensure_str
from website.util.client import BaseClient
class BitbucketClient(BaseClient):
def __init__(self, access_token=None):
- self.access_token = access_token
+ self.access_token = ensure_str(access_token)
@property
def _default_headers(self):
diff --git a/addons/dataverse/models.py b/addons/dataverse/models.py
index 147eddc1fff..f586a99a958 100644
--- a/addons/dataverse/models.py
+++ b/addons/dataverse/models.py
@@ -1,14 +1,14 @@
# -*- coding: utf-8 -*-
from rest_framework import status as http_status
+from addons.base import exceptions as addon_errors
from addons.base.models import (BaseOAuthNodeSettings, BaseOAuthUserSettings,
BaseStorageAddon)
+from django.contrib.contenttypes.models import ContentType
from django.db import models
from framework.auth.decorators import Auth
from framework.exceptions import HTTPError
from osf.models.files import File, Folder, BaseFileNode
-from osf.utils.permissions import WRITE
-from framework.auth.core import _get_current_user
from addons.base import exceptions
from addons.dataverse.client import connect_from_settings_or_401
from addons.dataverse.serializer import DataverseSerializer
@@ -17,6 +17,36 @@
class DataverseFileNode(BaseFileNode):
_provider = 'dataverse'
+ @classmethod
+ def get_or_create(cls, target, path, **query_params):
+ '''Override get_or_create for Dataverse.
+
+ Dataverse is weird and reuses paths, so we need to extract a "version"
+ query param to determine which file to get. We also don't want to "create"
+ here, as that might lead to integrity errors.
+ '''
+ version = query_params.get('version', None)
+ if version not in {'latest', 'latest-published'}:
+ raise addon_errors.QueryError(
+ 'Dataverse requires a "version" query parameter. '
+ 'Acceptable options are "latest" or "latest-published".'
+ )
+
+ content_type = ContentType.objects.get_for_model(target)
+ try:
+ obj = cls.objects.get(
+ target_object_id=target.id,
+ target_content_type=content_type,
+ _path='/' + path.lstrip('/'),
+ _history__0__extra__datasetVersion=version,
+ )
+ except cls.DoesNotExist:
+ raise addon_errors.DoesNotExist(
+ f'Requested Dataverse file does not exist with version "{version}"'
+ )
+
+ return obj
+
class DataverseFolder(DataverseFileNode, Folder):
pass
@@ -33,25 +63,11 @@ def _hashes(self):
return None
def update(self, revision, data, save=True, user=None):
- """Note: Dataverse only has psuedo versions, pass None to not save them
+ """Note: Dataverse only has psuedo versions (_history), pass None to not save them
Call super to update _history and last_touched anyway.
- Dataverse requires a user for the weird check below
"""
- version = super(DataverseFile, self).update(None, data, user=user, save=save)
+ version = super().update(None, data, user=user, save=save)
version.identifier = revision
-
- user = user or _get_current_user()
- if not user or not self.target.has_permission(user, WRITE):
- try:
- # Users without edit permission can only see published files
- if not data['extra']['hasPublishedVersion']:
- # Blank out name and path for the render
- # Dont save because there's no reason to persist the change
- self.name = ''
- self.materialized_path = ''
- return (version, 'This file does not exist.')
- except (KeyError, IndexError):
- pass
return version
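Reviewer note (not part of the diff): a minimal usage sketch of the new override, assuming a project `node` that already has a DataverseFile record at path '/12345' whose first _history entry recorded datasetVersion 'latest':

    from addons.base import exceptions as addon_errors
    from addons.dataverse.models import DataverseFile

    # Returns the existing record for that path/version.
    DataverseFile.get_or_create(node, '12345', version='latest')

    try:
        # Missing "version" -> QueryError, which the file view maps to HTTP 400.
        DataverseFile.get_or_create(node, '12345')
    except addon_errors.QueryError:
        pass

    try:
        # No matching published record -> DoesNotExist, mapped to HTTP 404.
        DataverseFile.get_or_create(node, '12345', version='latest-published')
    except addon_errors.DoesNotExist:
        pass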
diff --git a/addons/dropbox/models.py b/addons/dropbox/models.py
index 9bdce337b1a..05a7e6797ad 100644
--- a/addons/dropbox/models.py
+++ b/addons/dropbox/models.py
@@ -2,25 +2,22 @@
import logging
import os
-from oauthlib.common import generate_token
-
from addons.base.models import (BaseOAuthNodeSettings, BaseOAuthUserSettings,
BaseStorageAddon)
from django.db import models
from dropbox.dropbox import Dropbox
from dropbox.exceptions import ApiError, DropboxException
from dropbox.files import FolderMetadata
-from dropbox import DropboxOAuth2Flow, oauth
-from flask import request
+from furl import furl
from framework.auth import Auth
from framework.exceptions import HTTPError
-from framework.sessions import session
from osf.models.external import ExternalProvider
from osf.models.files import File, Folder, BaseFileNode
+from osf.utils.fields import ensure_str
from addons.base import exceptions
from addons.dropbox import settings
from addons.dropbox.serializer import DropboxSerializer
-from website.util import api_v2_url, web_url_for
+from website.util import api_v2_url
logger = logging.getLogger(__name__)
@@ -49,62 +46,31 @@ class Provider(ExternalProvider):
client_id = settings.DROPBOX_KEY
client_secret = settings.DROPBOX_SECRET
- # Explicitly override auth_url_base as None -- DropboxOAuth2Flow handles this for us
- auth_url_base = None
- callback_url = None
- handle_callback = None
-
- @property
- def oauth_flow(self):
- if 'oauth_states' not in session.data:
- session.data['oauth_states'] = {}
- if self.short_name not in session.data['oauth_states']:
- session.data['oauth_states'][self.short_name] = {
- 'state': generate_token()
- }
- return DropboxOAuth2Flow(
- self.client_id,
- self.client_secret,
- redirect_uri=web_url_for(
- 'oauth_callback',
- service_name=self.short_name,
- _absolute=True
- ),
- session=session.data['oauth_states'][self.short_name], csrf_token_session_key='state'
- )
+ auth_url_base = settings.DROPBOX_OAUTH_AUTH_ENDPOINT
+ callback_url = settings.DROPBOX_OAUTH_TOKEN_ENDPOINT
+ auto_refresh_url = settings.DROPBOX_OAUTH_TOKEN_ENDPOINT
+ refresh_time = settings.REFRESH_TIME
@property
def auth_url(self):
- ret = self.oauth_flow.start('force_reapprove=true')
- session.save()
- return ret
-
- # Overrides ExternalProvider
- def auth_callback(self, user):
- # TODO: consider not using client library during auth flow
- try:
- access_token = self.oauth_flow.finish(request.values).access_token
- except (oauth.NotApprovedException, oauth.BadStateException):
- # 1) user cancelled and client library raised exc., or
- # 2) the state was manipulated, possibly due to time.
- # Either way, return and display info about how to properly connect.
- return
- except (oauth.ProviderException, oauth.CsrfException):
- raise HTTPError(http_status.HTTP_403_FORBIDDEN)
- except oauth.BadRequestException:
- raise HTTPError(http_status.HTTP_400_BAD_REQUEST)
+ # Dropbox requires explicitly requesting refresh_tokens via `token_access_type`
+ # https://developers.dropbox.com/oauth-guide#implementing-oauth
+ url = super(Provider, self).auth_url
+ return furl(url).add({'token_access_type': 'offline'}).url
+ def handle_callback(self, response):
+ access_token = response['access_token']
self.client = Dropbox(access_token)
-
info = self.client.users_get_current_account()
- return self._set_external_account(
- user,
- {
- 'key': access_token,
- 'provider_id': info.account_id,
- 'display_name': info.name.display_name,
- }
- )
+ return {
+ 'key': access_token,
+ 'provider_id': info.account_id,
+ 'display_name': info.name.display_name,
+ }
+
+ def fetch_access_token(self, force_refresh=False):
+ self.refresh_oauth_key(force=force_refresh)
+ return ensure_str(self.account.oauth_key)
class UserSettings(BaseOAuthUserSettings):
@@ -119,7 +85,7 @@ def revoke_remote_oauth_access(self, external_account):
Tells Dropbox to remove the grant for the OSF associated with this account.
"""
- client = Dropbox(external_account.oauth_key)
+ client = Dropbox(Provider(external_account).fetch_access_token())
try:
client.auth_token_revoke()
except DropboxException:
@@ -158,6 +124,9 @@ def folder_path(self):
def display_name(self):
return '{0}: {1}'.format(self.config.full_name, self.folder)
+ def fetch_access_token(self):
+ return self.api.fetch_access_token()
+
def clear_settings(self):
self.folder = None
@@ -177,7 +146,7 @@ def get_folders(self, **kwargs):
}
}]
- client = Dropbox(self.external_account.oauth_key)
+ client = Dropbox(self.fetch_access_token())
try:
folder_id = '' if folder_id == '/' else folder_id
@@ -230,7 +199,7 @@ def deauthorize(self, auth=None, add_log=True):
def serialize_waterbutler_credentials(self):
if not self.has_auth:
raise exceptions.AddonError('Addon is not authorized')
- return {'token': self.external_account.oauth_key}
+ return {'token': self.fetch_access_token()}
def serialize_waterbutler_settings(self):
if not self.folder:
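Reviewer note (not part of the diff): Dropbox now hands out short-lived access tokens, so callers go through fetch_access_token(), which refreshes the stored oauth_key as needed (per REFRESH_TIME) and returns it as a str. A quick illustration of what the auth_url override adds, with hypothetical query values:

    from furl import furl

    url = 'https://www.dropbox.com/oauth2/authorize?client_id=abc&response_type=code'
    print(furl(url).add({'token_access_type': 'offline'}).url)
    # https://www.dropbox.com/oauth2/authorize?client_id=abc&response_type=code&token_access_type=offline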
diff --git a/addons/dropbox/settings/defaults.py b/addons/dropbox/settings/defaults.py
index 829e162fc8c..e44e7a41eb0 100644
--- a/addons/dropbox/settings/defaults.py
+++ b/addons/dropbox/settings/defaults.py
@@ -3,6 +3,9 @@
DROPBOX_SECRET = None
DROPBOX_AUTH_CSRF_TOKEN = 'dropbox-auth-csrf-token'
+DROPBOX_OAUTH_AUTH_ENDPOINT = 'https://www.dropbox.com/oauth2/authorize'
+DROPBOX_OAUTH_TOKEN_ENDPOINT = 'https://www.dropbox.com/oauth2/token'
+REFRESH_TIME = 14399 # 4 hours
# Max file size permitted by frontend in megabytes
MAX_UPLOAD_SIZE = 150
diff --git a/addons/googledrive/README.md b/addons/googledrive/README.md
index 56682522cff..90b6c67fc7d 100644
--- a/addons/googledrive/README.md
+++ b/addons/googledrive/README.md
@@ -11,7 +11,8 @@
3. Click on the "Google Drive API" link, and enable it
4. Click on "Credentials", and "create credentials". Select "Oath Client ID", with "web application" and set the redirect uri to `http://localhost:5000/oauth/callback/googledrive/`
5. Submit your new client ID and make a note of your new ID and secret
-6. (Optional) You may find that the default 10 "QPS per User" rate limit is too restrictive. This can result in unexpected 403 "User Rate Limit Exceeded" messages. You may find it useful to request this limit be raised to 100. To do so, in the Google API console, from the dashboard of your project, click on "Google Drive API" in the list of APIs. Then click the "quotas" tab. Then click any of the pencils in the quotas table. Click the "apply for higher quota" link. Request that your "QPS per User" be raised to 100.
+6. Add yourself as a test user and ensure the OAuth app is configured securely
+7. (Optional) You may find that the default 10 "QPS per User" rate limit is too restrictive. This can result in unexpected 403 "User Rate Limit Exceeded" messages. You may find it useful to request this limit be raised to 100. To do so, in the Google API console, from the dashboard of your project, click on "Google Drive API" in the list of APIs. Then click the "quotas" tab. Then click any of the pencils in the quotas table. Click the "apply for higher quota" link. Request that your "QPS per User" be raised to 100.
### Enable for OSF
1. Create a local googledrive settings file with `cp addons/googledrive/settings/local-dist.py addons/googledrive/settings/local.py`
diff --git a/addons/osfstorage/models.py b/addons/osfstorage/models.py
index 3d24b2237da..aa9d8932317 100644
--- a/addons/osfstorage/models.py
+++ b/addons/osfstorage/models.py
@@ -85,7 +85,7 @@ def get(cls, _id, target):
return cls.objects.get(_id=_id, target_object_id=target.id, target_content_type=ContentType.objects.get_for_model(target))
@classmethod
- def get_or_create(cls, target, path):
+ def get_or_create(cls, target, path, **unused_query_params):
"""Override get or create for osfstorage
Path is always the _id of the osfstorage filenode.
Use load here as its way faster than find.
diff --git a/addons/osfstorage/tests/test_views.py b/addons/osfstorage/tests/test_views.py
index 74c64aa90ea..bbcc2b63d67 100644
--- a/addons/osfstorage/tests/test_views.py
+++ b/addons/osfstorage/tests/test_views.py
@@ -12,6 +12,8 @@
from dateutil.parser import parse as parse_datetime
from website import settings
+from addons.github.tests.factories import GitHubAccountFactory
+from addons.github.models import GithubFile
from addons.osfstorage.models import OsfStorageFileNode, OsfStorageFolder
from framework.auth.core import Auth
from addons.osfstorage.tests.utils import (
@@ -39,6 +41,8 @@
from osf_tests.factories import ProjectFactory, ApiOAuth2PersonalTokenFactory, PreprintFactory
from website.files.utils import attach_versions
from website.settings import EXTERNAL_EMBER_APPS
+from api_tests.draft_nodes.views.test_draft_node_files_lists import prepare_mock_wb_response
+
def create_record_with_version(path, node_settings, **kwargs):
version = factories.FileVersionFactory(**kwargs)
@@ -211,6 +215,17 @@ def test_metadata_not_found(self):
assert_equal(res.status_code, 404)
def test_metadata_not_found_lots_of_slashes(self):
+ res = self.send_hook(
+ 'osfstorage_get_metadata',
+ {'fid': '/not/fo/u/nd/'}, {},
+ self.node,
+ expect_errors=True,
+ )
+ assert_equal(res.status_code, 302)
+ assert '/login?service=' in res.location
+
+ self.node.is_public = True
+ self.node.save()
res = self.send_hook(
'osfstorage_get_metadata',
{'fid': '/not/fo/u/nd/'}, {},
@@ -1411,13 +1426,54 @@ def test_file_remove_tag_fail_doesnt_create_log(self, mock_log):
@pytest.mark.enable_bookmark_creation
class TestFileViews(StorageTestCase):
+ def add_github(self):
+ addon = self.node.add_addon('github', auth=Auth(self.user))
+ oauth_settings = GitHubAccountFactory()
+ oauth_settings.save()
+ self.user.add_addon('github')
+ self.user.external_accounts.add(oauth_settings)
+ self.user.save()
+ addon.user_settings = self.user.get_addon('github')
+ addon.external_account = oauth_settings
+ addon.repo = 'something'
+ addon.user = 'someone'
+ addon.save()
+ addon.user_settings.oauth_grants[self.project._id] = {
+ oauth_settings._id: []}
+ addon.user_settings.save()
+ self.node.save()
+
+
+ @responses.activate
+ def test_file_view_updates_history(self):
+ self.add_github()
+
+ # This represents a file added directly on GitHub, without any OSF activity.
+ prepare_mock_wb_response(
+ folder=False,
+ path='/testpath',
+ node=self.node,
+ provider='github',
+ files=[
+ {'name': 'testpath', 'path': '/testpath', 'materialized': '/testpath', 'kind': 'file'},
+ ]
+ )
+ with override_flag(features.EMBER_FILE_PROJECT_DETAIL, active=True):
+ url = self.node.web_url_for('addon_view_or_download_file', path='testpath', provider='github')
+ self.app.get(url, auth=self.user.auth)
+ file = GithubFile.objects.get(_path='/testpath', provider='github')
+ assert file.history
+
@mock.patch('website.views.stream_emberapp')
def test_file_views(self, mock_ember):
with override_flag(features.EMBER_FILE_PROJECT_DETAIL, active=True):
file = create_test_file(target=self.node, user=self.user)
url = self.node.web_url_for('addon_view_or_download_file', path=file._id, provider=file.provider)
res = self.app.get(url, auth=self.user.auth)
- assert res.status_code == 200
+ assert res.status_code == 302
+ assert res.headers['Location'] == f'{settings.DOMAIN}{file.get_guid()._id}/'
+ assert not mock_ember.called
+ res.follow()
assert mock_ember.called
args, kwargs = mock_ember.call_args
diff --git a/addons/wiki/templates/wiki-bar-modal-help.mako b/addons/wiki/templates/wiki-bar-modal-help.mako
index 88e84f54875..cacd5edf88a 100644
--- a/addons/wiki/templates/wiki-bar-modal-help.mako
+++ b/addons/wiki/templates/wiki-bar-modal-help.mako
@@ -7,7 +7,7 @@
- The wiki uses the Markdown syntax. For more information and examples, go to our Guides.
+ The wiki uses the Markdown syntax. For more information and examples, go to our Guides.
+ {% if perms.osf.change_registrationschema %}
+
+ Registration Schemas
+
+
+
+
+ {% endif %}
{% endif %}
{% if perms.osf.view_conference %}
Meetings
diff --git a/admin/templates/registration_schemas/registration_schema.html b/admin/templates/registration_schemas/registration_schema.html
new file mode 100644
index 00000000000..46980fd45f9
--- /dev/null
+++ b/admin/templates/registration_schemas/registration_schema.html
@@ -0,0 +1,56 @@
+{% extends 'base.html' %}
+{% load static %}
+{% load node_extras %}
+{% block title %}
+ Registration Schema
+{% endblock title %}
+{% block content %}
+
+
+ {% for message in messages %}
+ - {{ message }}
+ {% endfor %}
+
+
+
+
Registration Schema Details
+
Associate this schema with a provider via the Providers Page
+
+
+
{{ registration_schema.name }} v{{ registration_schema.schema_version }}
+
+
+
+ Field |
+ Value |
+
+
+
+
+ Schema Version |
+ {{registration_schema.schema_version}} |
+
+
+ Description |
+ {{registration_schema.description}} |
+
+
+
+
+
+
+{% endblock content %}
diff --git a/admin/templates/registration_schemas/registration_schema_create.html b/admin/templates/registration_schemas/registration_schema_create.html
new file mode 100644
index 00000000000..4630aa36be8
--- /dev/null
+++ b/admin/templates/registration_schemas/registration_schema_create.html
@@ -0,0 +1,40 @@
+{% extends 'base.html' %}
+{% load static %}
+{% load node_extras %}
+{% block title %}
+ Registration Schema
+{% endblock title %}
+{% block content %}
+
+
+
+ {% for message in messages %}
+ - {{ message }}
+ {% endfor %}
+
+
+
+
+
Registration Schema Details
+
Remember to activate your Schema and associate it with a provider via the Providers Page
+
+
+
+{% endblock content %}
diff --git a/admin/templates/registration_schemas/registration_schema_list.html b/admin/templates/registration_schemas/registration_schema_list.html
new file mode 100644
index 00000000000..6b1c0393a6f
--- /dev/null
+++ b/admin/templates/registration_schemas/registration_schema_list.html
@@ -0,0 +1,77 @@
+{% extends 'base.html' %}
+{% load node_extras %}
+{% include "util/pagination.html" with items=page status=status %}
+{% block content %}
+ List of Registration Schemas
+
+
+ {% for message in messages %}
+ - {{ message }}
+ {% endfor %}
+
+
+
+
+
+ Name |
+ Version |
+ Active (This determines if the schema can be used) |
+ Visible (This determines if the schema is listed in our list of schemas) |
+ Modified Date |
+ Delete |
+
+
+
+ {% for registration_schema in registration_schemas %}
+
+
+ {{ registration_schema.name }}
+ |
+
+ {{ registration_schema.schema_version }}
+ |
+
+ {{ registration_schema.active }}
+ |
+
+ {{ registration_schema.visible }}
+ |
+
+ {{ registration_schema.modified }}
+ |
+
+
+ Delete
+
+ |
+
+
+ {% endfor %}
+
+
+{% endblock content %}
\ No newline at end of file
diff --git a/admin/yarn.lock b/admin/yarn.lock
index 4de0a6d252c..90252afd898 100644
--- a/admin/yarn.lock
+++ b/admin/yarn.lock
@@ -5,19 +5,19 @@
JSON2@^0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/JSON2/-/JSON2-0.1.0.tgz#8d7493040a63d5835af75f47decb83ab6c8c0790"
- integrity sha1-jXSTBApj1YNa919H3suDq2yMB5A=
+ integrity sha512-MRguCg79vy8Kx15/CXzoO5pEPi0tQq7T70mL/t1Hv3G+hVfJVO2BZqX3sl2kbWW08GAmzQYSRfWRtQhmK/eaYA==
acorn-dynamic-import@^2.0.0:
version "2.0.2"
resolved "https://registry.yarnpkg.com/acorn-dynamic-import/-/acorn-dynamic-import-2.0.2.tgz#c752bd210bef679501b6c6cb7fc84f8f47158cc4"
- integrity sha1-x1K9IQvvZ5UBtsbLf8hPj0cVjMQ=
+ integrity sha512-GKp5tQ8h0KMPWIYGRHHXI1s5tUpZixZ3IHF2jAu42wSCf6In/G873s6/y4DdKdhWvzhu1T6mE1JgvnhAKqyYYQ==
dependencies:
acorn "^4.0.3"
acorn@^4.0.3:
version "4.0.13"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-4.0.13.tgz#105495ae5361d697bd195c825192e1ad7f253787"
- integrity sha1-EFSVrlNh1pe9GVyCUZLhrX8lN4c=
+ integrity sha512-fu2ygVGuMmlzG8ZeRJ0bvR41nsAkxxhbyk8bZ1SS521Z7vmgJFTQQlfz/Mp/nJexGBz+v8sC9bM6+lNgskt4Ug==
acorn@^5.0.0:
version "5.7.4"
@@ -44,25 +44,30 @@ ajv@^6.1.0:
json-schema-traverse "^0.4.1"
uri-js "^4.2.2"
+alphanum-sort@^1.0.1, alphanum-sort@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/alphanum-sort/-/alphanum-sort-1.0.2.tgz#97a1119649b211ad33691d9f9f486a8ec9fbe0a3"
+ integrity sha512-0FcBfdcmaumGPQ0qPn7Q5qTgz/ooXgIyp1rf8ik5bGX8mpE2YHjC0P/eyQvxu1GURYQgq9ozf2mteQ5ZD9YiyQ==
+
ansi-regex@^1.0.0:
version "1.1.1"
resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-1.1.1.tgz#41c847194646375e6a1a5d10c3ca054ef9fc980d"
- integrity sha1-QchHGUZGN15qGl0Qw8oFTvn8mA0=
+ integrity sha512-q5i8bFLg2wDfsuR56c1NzlJFPzVD+9mxhDrhqOGigEFa87OZHlF+9dWeGWzVTP/0ECiA/JUGzfzRr2t3eYORRw==
ansi-regex@^2.0.0:
version "2.1.1"
resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df"
- integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8=
+ integrity sha512-TIGnTpdo+E3+pCyAluZvtED5p5wCqLdezCyhPZzKPcxvFplEt4i+W7OONCKgeZFT3+y5NZZfOOS/Bdcanm1MYA==
ansi-regex@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998"
- integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.1.tgz#123d6479e92ad45ad897d4054e3c7ca7db4944e1"
+ integrity sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw==
ansi-styles@^2.2.1:
version "2.2.1"
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe"
- integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=
+ integrity sha512-kmCevFghRiWM7HB5zTPULl4r9bVFSWjz62MhqizDGUrq2NWuNMQyuv4tHHoKJHs69M/MF64lEcHdYIocrdWQYA==
ansi-styles@^3.2.1:
version "3.2.1"
@@ -76,6 +81,13 @@ aproba@^1.1.1:
resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a"
integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==
+argparse@^1.0.7:
+ version "1.0.10"
+ resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911"
+ integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==
+ dependencies:
+ sprintf-js "~1.0.2"
+
asn1.js@^5.2.0:
version "5.4.1"
resolved "https://registry.yarnpkg.com/asn1.js/-/asn1.js-5.4.1.tgz#11a980b84ebb91781ce35b0fdc2ee294e3783f07"
@@ -97,19 +109,31 @@ assert@^1.1.1:
async@^1.4.0:
version "1.5.2"
resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a"
- integrity sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo=
+ integrity sha512-nSVgobk4rv61R9PUSDtYt7mPVB2olxNR5RWJcAsH676/ef11bUZwvu7+RGYrYauVdDPcO519v68wRhXQtxsV9w==
async@^2.1.2:
- version "2.6.3"
- resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff"
- integrity sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==
+ version "2.6.4"
+ resolved "https://registry.yarnpkg.com/async/-/async-2.6.4.tgz#706b7ff6084664cd7eae713f6f965433b5504221"
+ integrity sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA==
dependencies:
lodash "^4.17.14"
+autoprefixer@^6.3.1:
+ version "6.7.7"
+ resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-6.7.7.tgz#1dbd1c835658e35ce3f9984099db00585c782014"
+ integrity sha512-WKExI/eSGgGAkWAO+wMVdFObZV7hQen54UpD1kCCTN3tvlL3W1jL4+lPP/M7MwoP7Q4RHzKtO3JQ4HxYEcd+xQ==
+ dependencies:
+ browserslist "^1.7.6"
+ caniuse-db "^1.0.30000634"
+ normalize-range "^0.1.2"
+ num2fraction "^1.2.2"
+ postcss "^5.2.16"
+ postcss-value-parser "^3.2.3"
+
babel-code-frame@^6.26.0:
version "6.26.0"
resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b"
- integrity sha1-Y/1D99weO7fONZR9uP42mj9Yx0s=
+ integrity sha512-XqYMR2dfdGMW+hd0IUZ2PwK+fGeFkOxZJ0wY+JaQAHzt1Zx8LcvpiZD2NiGkEG8qx0CfkAOr5xt76d1e8vG90g==
dependencies:
chalk "^1.1.3"
esutils "^2.0.2"
@@ -157,7 +181,7 @@ babel-generator@^6.26.0:
babel-helpers@^6.24.1:
version "6.24.1"
resolved "https://registry.yarnpkg.com/babel-helpers/-/babel-helpers-6.24.1.tgz#3471de9caec388e5c850e597e58a26ddf37602b2"
- integrity sha1-NHHenK7DiOXIUOWX5Yom3fN2ArI=
+ integrity sha512-n7pFrqQm44TCYvrCDb0MqabAF+JUBq+ijBvNMUxpkLjJaAu32faIexewMumrH5KLLJ1HDyT0PTEqRyAe/GwwuQ==
dependencies:
babel-runtime "^6.22.0"
babel-template "^6.24.1"
@@ -165,7 +189,7 @@ babel-helpers@^6.24.1:
babel-loader@^6.4.1:
version "6.4.1"
resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-6.4.1.tgz#0b34112d5b0748a8dcdbf51acf6f9bd42d50b8ca"
- integrity sha1-CzQRLVsHSKjc2/Uaz2+b1C1QuMo=
+ integrity sha512-hHvbCsXtwKIznu5Qmqfe/IwZ4O5frqe+j04fN/5u/9Rg48dpWIKyYqAN68N1wwqGSMToo4FhU9/MrH+QZlFdkQ==
dependencies:
find-cache-dir "^0.1.1"
loader-utils "^0.2.16"
@@ -175,14 +199,14 @@ babel-loader@^6.4.1:
babel-messages@^6.23.0:
version "6.23.0"
resolved "https://registry.yarnpkg.com/babel-messages/-/babel-messages-6.23.0.tgz#f3cdf4703858035b2a2951c6ec5edf6c62f2630e"
- integrity sha1-8830cDhYA1sqKVHG7F7fbGLyYw4=
+ integrity sha512-Bl3ZiA+LjqaMtNYopA9TYE9HP1tQ+E5dLxE0XrAzcIJeK2UqF0/EaqXwBn9esd4UmTfEab+P+UYQ1GnioFIb/w==
dependencies:
babel-runtime "^6.22.0"
babel-register@^6.26.0:
version "6.26.0"
resolved "https://registry.yarnpkg.com/babel-register/-/babel-register-6.26.0.tgz#6ed021173e2fcb486d7acb45c6009a856f647071"
- integrity sha1-btAhFz4vy0htestFxgCahW9kcHE=
+ integrity sha512-veliHlHX06wjaeY8xNITbveXSiI+ASFnOqvne/LaIJIqOWi2Ogmj91KOugEz/hoh/fwMhXNBJPCv8Xaz5CyM4A==
dependencies:
babel-core "^6.26.0"
babel-runtime "^6.26.0"
@@ -195,7 +219,7 @@ babel-register@^6.26.0:
babel-runtime@^6.22.0, babel-runtime@^6.26.0:
version "6.26.0"
resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe"
- integrity sha1-llxwWGaOgrVde/4E/yM3vItWR/4=
+ integrity sha512-ITKNuq2wKlW1fJg9sSW52eepoYgZBggvOAHC0u/CYu/qxQ9EVzThCgR69BnSXLHjy2f7SY5zaQ4yt7H9ZVxY2g==
dependencies:
core-js "^2.4.0"
regenerator-runtime "^0.11.0"
@@ -203,7 +227,7 @@ babel-runtime@^6.22.0, babel-runtime@^6.26.0:
babel-template@^6.24.1, babel-template@^6.26.0:
version "6.26.0"
resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.26.0.tgz#de03e2d16396b069f46dd9fff8521fb1a0e35e02"
- integrity sha1-3gPi0WOWsGn0bdn/+FIfsaDjXgI=
+ integrity sha512-PCOcLFW7/eazGUKIoqH97sO9A2UYMahsn/yRQ7uOk37iutwjq7ODtcTNF+iFDSHNfkctqsLRjLP7URnOx0T1fg==
dependencies:
babel-runtime "^6.26.0"
babel-traverse "^6.26.0"
@@ -214,7 +238,7 @@ babel-template@^6.24.1, babel-template@^6.26.0:
babel-traverse@^6.26.0:
version "6.26.0"
resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.26.0.tgz#46a9cbd7edcc62c8e5c064e2d2d8d0f4035766ee"
- integrity sha1-RqnL1+3MYsjlwGTi0tjQ9ANXZu4=
+ integrity sha512-iSxeXx7apsjCHe9c7n8VtRXGzI2Bk1rBSOJgCCjfyXb6v1aCqE1KSEpq/8SXuVN8Ka/Rh1WDTF0MDzkvTA4MIA==
dependencies:
babel-code-frame "^6.26.0"
babel-messages "^6.23.0"
@@ -229,7 +253,7 @@ babel-traverse@^6.26.0:
babel-types@^6.26.0:
version "6.26.0"
resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.26.0.tgz#a3b073f94ab49eb6fa55cd65227a334380632497"
- integrity sha1-o7Bz+Uq0nrb6Vc1lInozQ4BjJJc=
+ integrity sha512-zhe3V/26rCWsEZK8kZN+HaQj5yQ1CilTObixFzKW1UWjqG7618Twz6YEsCnjfg5gBcJh02DrpCkS9h98ZqDY+g==
dependencies:
babel-runtime "^6.26.0"
esutils "^2.0.2"
@@ -241,6 +265,11 @@ babylon@^6.18.0:
resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3"
integrity sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ==
+balanced-match@^0.4.2:
+ version "0.4.2"
+ resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-0.4.2.tgz#cb3f3e3c732dc0f01ee70b403f302e61d7709838"
+ integrity sha512-STw03mQKnGUYtoNjmowo4F2cRmIIxYEGiMsjjwla/u5P1lxadj/05WkNaFjNiKTgJkj8KiXbgAiRTmcQRwQNtg==
+
balanced-match@^1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee"
@@ -272,33 +301,26 @@ bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.11.9:
integrity sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==
bn.js@^5.0.0, bn.js@^5.1.1:
- version "5.2.0"
- resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.2.0.tgz#358860674396c6997771a9d051fcc1b57d4ae002"
- integrity sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw==
+ version "5.2.1"
+ resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.2.1.tgz#0bc527a6a0d18d0aa8d5b0538ce4a77dccfa7b70"
+ integrity sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ==
-bootstrap-colorpicker@^3.4.0:
- version "3.4.0"
- resolved "https://registry.yarnpkg.com/bootstrap-colorpicker/-/bootstrap-colorpicker-3.4.0.tgz#3d1873071542755a9b31cf5b314f771e2fcc7727"
- integrity sha512-7vA0hvLrat3ptobEKlT9+6amzBUJcDAoh6hJRQY/AD+5dVZYXXf1ivRfrTwmuwiVLJo9rZwM8YB4lYzp6agzqg==
+bootstrap-colorpicker@^2.5.2:
+ version "2.5.3"
+ resolved "https://registry.yarnpkg.com/bootstrap-colorpicker/-/bootstrap-colorpicker-2.5.3.tgz#b50aff8590fbaa6b5aa63a5624e4213f1659a49d"
+ integrity sha512-xdllX8LSMvKULs3b8JrgRXTvyvjkSMHHHVuHjjN5FNMqr6kRe5NPiMHFmeAFjlgDF73MspikudLuEwR28LbzLw==
dependencies:
- bootstrap ">=4.0"
- jquery ">=2.2"
- popper.js ">=1.10"
+ jquery ">=1.10"
bootstrap-tagsinput@^0.7.1:
version "0.7.1"
resolved "https://registry.yarnpkg.com/bootstrap-tagsinput/-/bootstrap-tagsinput-0.7.1.tgz#ffe3b06bbe2a106945ef2814568005a94f211937"
- integrity sha1-/+Owa74qEGlF7ygUVoAFqU8hGTc=
-
-bootstrap@>=4.0:
- version "5.1.3"
- resolved "https://registry.yarnpkg.com/bootstrap/-/bootstrap-5.1.3.tgz#ba081b0c130f810fa70900acbc1c6d3c28fa8f34"
- integrity sha512-fcQztozJ8jToQWXxVuEyXWW+dSo8AiXWKwiSSrKWsRB/Qt+Ewwza+JWoLKiTuQLaEPhdNAJ7+Dosc9DOIqNy7Q==
+ integrity sha512-xSks67GWgXLnmO5gqp788vhh7WoXd9mHj5uKE5zg8rvw3sNYYSCjrSlrPRlPdpYKwmuxeuf2jsNjBSWEucyB1w==
bower@^1.3.12:
- version "1.8.13"
- resolved "https://registry.yarnpkg.com/bower/-/bower-1.8.13.tgz#5892f391f47134db88ea93d4c91a31f09792dc58"
- integrity sha512-8eWko16JlCTdaZZG70kddHPed17pHEbH8/IjfP4IFkQsfEqRsyNM09Dc8cDBFkSvtQ/2lTea7A+bMhRggG2a+Q==
+ version "1.8.14"
+ resolved "https://registry.yarnpkg.com/bower/-/bower-1.8.14.tgz#985722a3c1fcd35c93d4136ecbeafbeaaea74e86"
+ integrity sha512-8Rq058FD91q9Nwthyhw0la9fzpBz0iwZTrt51LWl+w+PnJgZk9J+5wp3nibsJcIUPglMYXr4NRBaR+TUj0OkBQ==
brace-expansion@^1.1.7:
version "1.1.11"
@@ -311,7 +333,7 @@ brace-expansion@^1.1.7:
brorand@^1.0.1, brorand@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f"
- integrity sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8=
+ integrity sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w==
browserify-aes@^1.0.0, browserify-aes@^1.0.4:
version "1.2.0"
@@ -370,7 +392,7 @@ browserify-sign@^4.0.0:
browserify-versionify@1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/browserify-versionify/-/browserify-versionify-1.0.3.tgz#927eaaf85f16fe8d8d59eb1c6da76b488c01406c"
- integrity sha1-kn6q+F8W/o2NWescbadrSIwBQGw=
+ integrity sha512-QJ6s79FZWcKlYptoyd1K5/m+hYuVN8aW8k5b5Z7U9GhL8lR9YrAtADftcSeBJOeltjqfr2JlmtU3FfS+GU/cXQ==
dependencies:
find-root "^0.1.1"
through2 "0.6.3"
@@ -382,6 +404,14 @@ browserify-zlib@^0.2.0:
dependencies:
pako "~1.0.5"
+browserslist@^1.3.6, browserslist@^1.5.2, browserslist@^1.7.6:
+ version "1.7.7"
+ resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-1.7.7.tgz#0bd76704258be829b2398bb50e4b62d1a166b0b9"
+ integrity sha512-qHJblDE2bXVRYzuDetv/wAeHOJyO97+9wxC1cdCtyzgNuSozOyRCiiLaCR1f71AN66lQdVVBipWm63V+a7bPOw==
+ dependencies:
+ caniuse-db "^1.0.30000639"
+ electron-to-chromium "^1.2.7"
+
buffer-from@^1.0.0:
version "1.1.2"
resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5"
@@ -390,7 +420,7 @@ buffer-from@^1.0.0:
buffer-xor@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9"
- integrity sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk=
+ integrity sha512-571s0T7nZWK6vB67HI5dyUF7wXiNcfaPPPTl6zYCNApANjIvYJTg7hlud/+cJpdAhS7dVzqMLmfhfHR3rAcOjQ==
buffer@^4.3.0:
version "4.9.2"
@@ -404,7 +434,7 @@ buffer@^4.3.0:
builtin-status-codes@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8"
- integrity sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug=
+ integrity sha512-HpGFw18DgFWlncDfjTa2rcQ4W88O1mC8e8yZ2AvQY5KDaktSTwo+KRf6nHK6FRI5FyRyb/5T6+TSxfP7QyGsmQ==
cacache@^10.0.4:
version "10.0.4"
@@ -428,12 +458,27 @@ cacache@^10.0.4:
camelcase@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-4.1.0.tgz#d545635be1e33c542649c69173e5de6acfae34dd"
- integrity sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0=
+ integrity sha512-FxAv7HpHrXbh3aPo4o2qxHay2lkLY3x5Mw3KeE4KQE8ysVfziWeRZDwcjauvwBSGEC/nXUPzZy8zeh4HokqOnw==
+
+caniuse-api@^1.5.2:
+ version "1.6.1"
+ resolved "https://registry.yarnpkg.com/caniuse-api/-/caniuse-api-1.6.1.tgz#b534e7c734c4f81ec5fbe8aca2ad24354b962c6c"
+ integrity sha512-SBTl70K0PkDUIebbkXrxWqZlHNs0wRgRD6QZ8guctShjbh63gEPfF+Wj0Yw+75f5Y8tSzqAI/NcisYv/cCah2Q==
+ dependencies:
+ browserslist "^1.3.6"
+ caniuse-db "^1.0.30000529"
+ lodash.memoize "^4.1.2"
+ lodash.uniq "^4.5.0"
+
+caniuse-db@^1.0.30000529, caniuse-db@^1.0.30000634, caniuse-db@^1.0.30000639:
+ version "1.0.30001359"
+ resolved "https://registry.yarnpkg.com/caniuse-db/-/caniuse-db-1.0.30001359.tgz#6642bf0d654c7db0df413e45d9be9ee15db822a1"
+ integrity sha512-9c4zPEXTKlVEWtHY5SDVxQZg09US8oyCf7wEPByyGR7T9lKXdhgxMiyS5DWwXaz4cPJiW4zmkM/mUsJSBb9VXw==
chalk@^1.1.3:
version "1.1.3"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98"
- integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=
+ integrity sha512-U3lRVLMSlsCfjqYPbLyVv11M9CPW4I728d6TCKMAOJueEeB9/8o+eSsMnxPJD+Q+K909sdESg7C+tIkoH6on1A==
dependencies:
ansi-styles "^2.2.1"
escape-string-regexp "^1.0.2"
@@ -453,7 +498,7 @@ chalk@^2.4.1:
charenc@0.0.2:
version "0.0.2"
resolved "https://registry.yarnpkg.com/charenc/-/charenc-0.0.2.tgz#c0a1d2f3a7092e03774bfa83f14c0fc5790a8667"
- integrity sha1-wKHS86cJLgN3S/qD8UwPxXkKhmc=
+ integrity sha512-yrLQ/yVUFXkzg7EDQsPieE/53+0RlaWTs+wBrvW36cyilJ2SaDWfl4Yj7MtLTXleV9uEKefbAGUPv2/iWSooRA==
chownr@^1.0.1:
version "1.1.4"
@@ -468,21 +513,40 @@ cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3:
inherits "^2.0.1"
safe-buffer "^5.0.1"
+clap@^1.0.9:
+ version "1.2.3"
+ resolved "https://registry.yarnpkg.com/clap/-/clap-1.2.3.tgz#4f36745b32008492557f46412d66d50cb99bce51"
+ integrity sha512-4CoL/A3hf90V3VIEjeuhSvlGFEHKzOz+Wfc2IVZc+FaUgU0ZQafJTP49fvnULipOPcAfqhyI2duwQyns6xqjYA==
+ dependencies:
+ chalk "^1.1.3"
+
cliui@^3.2.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/cliui/-/cliui-3.2.0.tgz#120601537a916d29940f934da3b48d585a39213d"
- integrity sha1-EgYBU3qRbSmUD5NNo7SNWFo5IT0=
+ integrity sha512-0yayqDxWQbqk3ojkYqUKqaAQ6AfNKeKWRNA8kR0WXzAsdHpP4BIaOmMAG87JGuO6qcobyW4GjxHd9PmhEd+T9w==
dependencies:
string-width "^1.0.1"
strip-ansi "^3.0.1"
wrap-ansi "^2.0.0"
+clone@^1.0.2:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e"
+ integrity sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==
+
+coa@~1.0.1:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/coa/-/coa-1.0.4.tgz#a9ef153660d6a86a8bdec0289a5c684d217432fd"
+ integrity sha512-KAGck/eNAmCL0dcT3BiuYwLbExK6lduR8DxM3C1TyDzaXhZHyZ8ooX5I5+na2e3dPFuibfxrGdorr0/Lr7RYCQ==
+ dependencies:
+ q "^1.1.2"
+
code-point-at@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77"
- integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=
+ integrity sha512-RpAVKQA5T63xEj6/giIbUEtZwJ4UFIc3ZtvEkiaUERylqe8xb5IvqcgOurZLahv93CLKfxcw5YI+DZcUBRyLXA==
-color-convert@^1.9.0:
+color-convert@^1.3.0, color-convert@^1.9.0:
version "1.9.3"
resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8"
integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==
@@ -492,7 +556,42 @@ color-convert@^1.9.0:
color-name@1.1.3:
version "1.1.3"
resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"
- integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=
+ integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==
+
+color-name@^1.0.0:
+ version "1.1.4"
+ resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2"
+ integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==
+
+color-string@^0.3.0:
+ version "0.3.0"
+ resolved "https://registry.yarnpkg.com/color-string/-/color-string-0.3.0.tgz#27d46fb67025c5c2fa25993bfbf579e47841b991"
+ integrity sha512-sz29j1bmSDfoAxKIEU6zwoIZXN6BrFbAMIhfYCNyiZXBDuU/aiHlN84lp/xDzL2ubyFhLDobHIlU1X70XRrMDA==
+ dependencies:
+ color-name "^1.0.0"
+
+color@^0.11.0:
+ version "0.11.4"
+ resolved "https://registry.yarnpkg.com/color/-/color-0.11.4.tgz#6d7b5c74fb65e841cd48792ad1ed5e07b904d764"
+ integrity sha512-Ajpjd8asqZ6EdxQeqGzU5WBhhTfJ/0cA4Wlbre7e5vXfmDSmda7Ov6jeKoru+b0vHcb1CqvuroTHp5zIWzhVMA==
+ dependencies:
+ clone "^1.0.2"
+ color-convert "^1.3.0"
+ color-string "^0.3.0"
+
+colormin@^1.0.5:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/colormin/-/colormin-1.1.2.tgz#ea2f7420a72b96881a38aae59ec124a6f7298133"
+ integrity sha512-XSEQUUQUR/lXqGyddiNH3XYFUPYlYr1vXy9rTFMsSOw+J7Q6EQkdlQIrTlYn4TccpsOaUE1PYQNjBn20gwCdgQ==
+ dependencies:
+ color "^0.11.0"
+ css-color-names "0.0.4"
+ has "^1.0.1"
+
+colors@~1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/colors/-/colors-1.1.2.tgz#168a4701756b6a7f51a12ce0c97bfa28c084ed63"
+ integrity sha512-ENwblkFQpqqia6b++zLD/KUWafYlVY/UNnAp7oz7LY7E924wmpye416wBOmvv/HMWzl8gL1kJlfvId/1Dg176w==
combined-stream@^1.0.5:
version "1.0.8"
@@ -509,7 +608,7 @@ commander@~2.13.0:
commondir@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b"
- integrity sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=
+ integrity sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==
component-emitter@^1.1.3:
version "1.3.0"
@@ -519,12 +618,12 @@ component-emitter@^1.1.3:
component-emitter@~1.2.0:
version "1.2.1"
resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6"
- integrity sha1-E3kY1teCg/ffemt8WmPhQOaUJeY=
+ integrity sha512-jPatnhd33viNplKjqXKRkGU345p263OIWzDL2wH3LGIGp5Kojo+uXizHmOADRvhGFFTnJqX3jBAKP6vvmSDKcA==
concat-map@0.0.1:
version "0.0.1"
resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
- integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=
+ integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==
concat-stream@^1.5.0:
version "1.6.2"
@@ -544,7 +643,7 @@ console-browserify@^1.1.0:
constants-browserify@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75"
- integrity sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U=
+ integrity sha512-xFxOwqIzR/e1k1gLiWEophSCMqXcwVHIH7akf7b/vxcUeGunlj3hvZaaqxwHsTgn+IndtkQJgSztIDWeumWJDQ==
convert-source-map@^1.5.1:
version "1.8.0"
@@ -556,12 +655,12 @@ convert-source-map@^1.5.1:
cookie@0.3.1:
version "0.3.1"
resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.3.1.tgz#e7e0a1f9ef43b4c8ba925c5c5a96e806d16873bb"
- integrity sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=
+ integrity sha512-+IJOX0OqlHCszo2mBUq+SrEbCj6w7Kpffqx60zYbPTFaO4+yYgRjHwcZNpWvaTylDHaV7PPmBHzSecZiMhtPgw==
cookiejar@2.0.6:
version "2.0.6"
resolved "https://registry.yarnpkg.com/cookiejar/-/cookiejar-2.0.6.tgz#0abf356ad00d1c5a219d88d44518046dd026acfe"
- integrity sha1-Cr81atANHFohnYjURRgEbdAmrP4=
+ integrity sha512-X9IsySmsr1heROBZCpyEYhqJyU7CXNJoVxIlQ5bBb7DskYUx0mQ+g2f7yPYajceZeGJWHQbIfGB6j0hywV/ARQ==
copy-anything@^2.0.1:
version "2.0.6"
@@ -626,7 +725,7 @@ create-hmac@^1.1.0, create-hmac@^1.1.4, create-hmac@^1.1.7:
cross-spawn@^5.0.1:
version "5.1.0"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449"
- integrity sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk=
+ integrity sha512-pTgQJ5KC0d2hcY8eyL1IzlBPYjTkyH72XRZPnLyKus2mBfNjQs3klqbJU2VILqZryAZUt9JOb3h/mWMy23/f5A==
dependencies:
lru-cache "^4.0.1"
shebang-command "^1.2.0"
@@ -635,7 +734,7 @@ cross-spawn@^5.0.1:
crypt@0.0.2:
version "0.0.2"
resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b"
- integrity sha1-iNf/fsDfuG9xPch7u0LQRNPmxBs=
+ integrity sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==
crypto-browserify@^3.11.0:
version "3.12.0"
@@ -654,17 +753,24 @@ crypto-browserify@^3.11.0:
randombytes "^2.0.0"
randomfill "^1.0.3"
-css-loader@^1.0.1, css-loader@~0.26.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-1.0.1.tgz#6885bb5233b35ec47b006057da01cc640b6b79fe"
- integrity sha512-+ZHAZm/yqvJ2kDtPne3uX0C+Vr3Zn5jFn2N4HywtS5ujwvsVkyg0VArEXpl3BgczDA8anieki1FIzhchX4yrDw==
+css-color-names@0.0.4:
+ version "0.0.4"
+ resolved "https://registry.yarnpkg.com/css-color-names/-/css-color-names-0.0.4.tgz#808adc2e79cf84738069b646cb20ec27beb629e0"
+ integrity sha512-zj5D7X1U2h2zsXOAM8EyUREBnnts6H+Jm+d1M2DbiQQcUtnqgQsMrdo8JW9R80YFUmIdBZeMu5wvYM7hcgWP/Q==
+
+css-loader@^0.28.11, css-loader@~0.26.1:
+ version "0.28.11"
+ resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-0.28.11.tgz#c3f9864a700be2711bb5a2462b2389b1a392dab7"
+ integrity sha512-wovHgjAx8ZIMGSL8pTys7edA1ClmzxHeY6n/d97gg5odgsxEgKjULPR0viqyC+FWMCL9sfqoC/QCUBo62tLvPg==
dependencies:
babel-code-frame "^6.26.0"
css-selector-tokenizer "^0.7.0"
+ cssnano "^3.10.0"
icss-utils "^2.1.0"
loader-utils "^1.0.2"
- lodash "^4.17.11"
- postcss "^6.0.23"
+ lodash.camelcase "^4.3.0"
+ object-assign "^4.1.1"
+ postcss "^5.0.6"
postcss-modules-extract-imports "^1.2.0"
postcss-modules-local-by-default "^1.2.0"
postcss-modules-scope "^1.1.0"
@@ -685,10 +791,56 @@ cssesc@^3.0.0:
resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee"
integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==
+cssnano@^3.10.0:
+ version "3.10.0"
+ resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-3.10.0.tgz#4f38f6cea2b9b17fa01490f23f1dc68ea65c1c38"
+ integrity sha512-0o0IMQE0Ezo4b41Yrm8U6Rp9/Ag81vNXY1gZMnT1XhO4DpjEf2utKERqWJbOoz3g1Wdc1d3QSta/cIuJ1wSTEg==
+ dependencies:
+ autoprefixer "^6.3.1"
+ decamelize "^1.1.2"
+ defined "^1.0.0"
+ has "^1.0.1"
+ object-assign "^4.0.1"
+ postcss "^5.0.14"
+ postcss-calc "^5.2.0"
+ postcss-colormin "^2.1.8"
+ postcss-convert-values "^2.3.4"
+ postcss-discard-comments "^2.0.4"
+ postcss-discard-duplicates "^2.0.1"
+ postcss-discard-empty "^2.0.1"
+ postcss-discard-overridden "^0.1.1"
+ postcss-discard-unused "^2.2.1"
+ postcss-filter-plugins "^2.0.0"
+ postcss-merge-idents "^2.1.5"
+ postcss-merge-longhand "^2.0.1"
+ postcss-merge-rules "^2.0.3"
+ postcss-minify-font-values "^1.0.2"
+ postcss-minify-gradients "^1.0.1"
+ postcss-minify-params "^1.0.4"
+ postcss-minify-selectors "^2.0.4"
+ postcss-normalize-charset "^1.1.0"
+ postcss-normalize-url "^3.0.7"
+ postcss-ordered-values "^2.1.0"
+ postcss-reduce-idents "^2.2.2"
+ postcss-reduce-initial "^1.0.0"
+ postcss-reduce-transforms "^1.0.3"
+ postcss-svgo "^2.1.1"
+ postcss-unique-selectors "^2.0.2"
+ postcss-value-parser "^3.2.3"
+ postcss-zindex "^2.0.1"
+
+csso@~2.3.1:
+ version "2.3.2"
+ resolved "https://registry.yarnpkg.com/csso/-/csso-2.3.2.tgz#ddd52c587033f49e94b71fc55569f252e8ff5f85"
+ integrity sha512-FmCI/hmqDeHHLaIQckMhMZneS84yzUZdrWDAvJVVxOwcKE1P1LF9FGmzr1ktIQSxOw6fl3PaQsmfg+GN+VvR3w==
+ dependencies:
+ clap "^1.0.9"
+ source-map "^0.5.3"
+
cyclist@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/cyclist/-/cyclist-1.0.1.tgz#596e9698fd0c80e12038c2b82d6eb1b35b6224d9"
- integrity sha1-WW6WmP0MgOEgOMK4LW6xs1tiJNk=
+ integrity sha512-NJGVKPS81XejHcLhaLJS7plab0fK3slPh11mESeeDq2W4ZI5kUKK/LRRdVDvjJseojbPB7ZwjnyOybg3Igea/A==
d@1, d@^1.0.1:
version "1.0.1"
@@ -712,15 +864,20 @@ debug@^3.2.6:
dependencies:
ms "^2.1.1"
-decamelize@^1.1.1:
+decamelize@^1.1.1, decamelize@^1.1.2:
version "1.2.0"
resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290"
- integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=
+ integrity sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==
+
+defined@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693"
+ integrity sha512-Y2caI5+ZwS5c3RiNDJ6u53VhQHv+hHKwhkI1iHvceKUHw9Df6EK2zRLfjejRgMuCuxK7PfSWIMwWecceVvThjQ==
delayed-stream@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619"
- integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk=
+ integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==
des.js@^1.0.0:
version "1.0.1"
@@ -733,7 +890,7 @@ des.js@^1.0.0:
detect-indent@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-4.0.0.tgz#f76d064352cdf43a1cb6ce619c4ee3a9475de208"
- integrity sha1-920GQ1LN9Docts5hnE7jqUdd4gg=
+ integrity sha512-BDKtmHlOzwI7iRuEkhzsnPoi5ypEhWAJB5RvHWe1kMr06js3uK5B3734i3ui5Yd+wOJV1cpE4JnivPD283GU/A==
dependencies:
repeating "^2.0.0"
@@ -754,7 +911,7 @@ domain-browser@^1.1.1:
domready@0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/domready/-/domready-0.3.0.tgz#b3740facbd09163018152d12aec239383e102175"
- integrity sha1-s3QPrL0JFjAYFS0SrsI5OD4QIXU=
+ integrity sha512-0w07yqKSi2caAo0qESL8Ou+ZkDfOrzxyJzSvJEdKmjOzPobPo7d4QFxpLjrq8zmigWP2mh2i/5F53NKSkPIqbA==
duplexify@^3.4.2, duplexify@^3.6.0:
version "3.7.1"
@@ -766,6 +923,11 @@ duplexify@^3.4.2, duplexify@^3.6.0:
readable-stream "^2.0.0"
stream-shift "^1.0.0"
+electron-to-chromium@^1.2.7:
+ version "1.4.172"
+ resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.172.tgz#87335795a3dc19e7b6dd5af291038477d81dc6b1"
+ integrity sha512-yDoFfTJnqBAB6hSiPvzmsBJSrjOXJtHSJoqJdI/zSIh7DYupYnIOHt/bbPw/WE31BJjNTybDdNAs21gCMnTh0Q==
+
elliptic@^6.5.3:
version "6.5.4"
resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.4.tgz#da37cebd31e79a1367e941b592ed1fbebd58abbb"
@@ -782,7 +944,7 @@ elliptic@^6.5.3:
emojis-list@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-2.1.0.tgz#4daa4d9db00f9819880c79fa457ae5b09a1fd389"
- integrity sha1-TapNnbAPmBmIDHn6RXrlsJof04k=
+ integrity sha512-knHEZMgs8BB+MInokmNTg/OyPlAddghe1YBgNwJBc5zsJi/uyIcXoSDsL/W9ymOsBoBGdPIHXYJ9+qKFwRwDng==
emojis-list@^3.0.0:
version "3.0.0"
@@ -799,7 +961,7 @@ end-of-stream@^1.0.0, end-of-stream@^1.1.0:
enhanced-resolve@^3.4.0:
version "3.4.1"
resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-3.4.1.tgz#0421e339fd71419b3da13d129b3979040230476e"
- integrity sha1-BCHjOf1xQZs9oT0Smzl5BAIwR24=
+ integrity sha512-ZaAux1rigq1e2nQrztHn4h2ugvpzZxs64qneNah+8Mh/K0CRqJFJc+UoXnUsq+1yX+DmQFPPdVqboKAJ89e0Iw==
dependencies:
graceful-fs "^4.1.2"
memory-fs "^0.4.0"
@@ -821,18 +983,18 @@ error-ex@^1.2.0:
is-arrayish "^0.2.1"
es5-ext@^0.10.35, es5-ext@^0.10.46, es5-ext@^0.10.50, es5-ext@~0.10.14:
- version "0.10.53"
- resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.53.tgz#93c5a3acfdbef275220ad72644ad02ee18368de1"
- integrity sha512-Xs2Stw6NiNHWypzRTY1MtaG/uJlwCk8kH81920ma8mvN8Xq1gsfhZvpkImLQArw8AHnv8MT2I45J3c0R8slE+Q==
+ version "0.10.61"
+ resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.61.tgz#311de37949ef86b6b0dcea894d1ffedb909d3269"
+ integrity sha512-yFhIqQAzu2Ca2I4SE2Au3rxVfmohU9Y7wqGR+s7+H7krk26NXhIRAZDgqd6xqjCEFUomDEA3/Bo/7fKmIkW1kA==
dependencies:
- es6-iterator "~2.0.3"
- es6-symbol "~3.1.3"
- next-tick "~1.0.0"
+ es6-iterator "^2.0.3"
+ es6-symbol "^3.1.3"
+ next-tick "^1.1.0"
-es6-iterator@^2.0.3, es6-iterator@~2.0.1, es6-iterator@~2.0.3:
+es6-iterator@^2.0.3, es6-iterator@~2.0.1:
version "2.0.3"
resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7"
- integrity sha1-p96IkUGgWpSwhUQDstCg+/qY87c=
+ integrity sha512-zw4SRzoUkd+cl+ZoE15A9o1oQd920Bb0iOJMQkQhl3jNc03YqVjAhG7scf9C5KWRU/R13Orf588uCC6525o02g==
dependencies:
d "1"
es5-ext "^0.10.35"
@@ -841,7 +1003,7 @@ es6-iterator@^2.0.3, es6-iterator@~2.0.1, es6-iterator@~2.0.3:
es6-map@^0.1.3:
version "0.1.5"
resolved "https://registry.yarnpkg.com/es6-map/-/es6-map-0.1.5.tgz#9136e0503dcc06a301690f0bb14ff4e364e949f0"
- integrity sha1-kTbgUD3MBqMBaQ8LsU/042TpSfA=
+ integrity sha512-mz3UqCh0uPCIqsw1SSAkB/p0rOzF/M0V++vyN7JqlPtSW/VsYgQBvVvqMLmfBuyMzTpLnNqi6JmcSizs4jy19A==
dependencies:
d "1"
es5-ext "~0.10.14"
@@ -853,7 +1015,7 @@ es6-map@^0.1.3:
es6-set@~0.1.5:
version "0.1.5"
resolved "https://registry.yarnpkg.com/es6-set/-/es6-set-0.1.5.tgz#d2b3ec5d4d800ced818db538d28974db0a73ccb1"
- integrity sha1-0rPsXU2ADO2BjbU40ol02wpzzLE=
+ integrity sha512-7S8YXIcUfPMOr3rqJBVMePAbRsD1nWeSMQ86K/lDI76S3WKXz+KWILvTIPbTroubOkZTGh+b+7/xIIphZXNYbA==
dependencies:
d "1"
es5-ext "~0.10.14"
@@ -864,12 +1026,12 @@ es6-set@~0.1.5:
es6-symbol@3.1.1:
version "3.1.1"
resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.1.tgz#bf00ef4fdab6ba1b46ecb7b629b4c7ed5715cc77"
- integrity sha1-vwDvT9q2uhtG7Le2KbTH7VcVzHc=
+ integrity sha512-exfuQY8UGtn/N+gL1iKkH8fpNd5sJ760nJq6mmZAHldfxMD5kX07lbQuYlspoXsuknXNv9Fb7y2GsPOnQIbxHg==
dependencies:
d "1"
es5-ext "~0.10.14"
-es6-symbol@^3.1.1, es6-symbol@~3.1.1, es6-symbol@~3.1.3:
+es6-symbol@^3.1.1, es6-symbol@^3.1.3, es6-symbol@~3.1.1:
version "3.1.3"
resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18"
integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA==
@@ -890,18 +1052,23 @@ es6-weak-map@^2.0.1:
escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
- integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=
+ integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==
escope@^3.6.0:
version "3.6.0"
resolved "https://registry.yarnpkg.com/escope/-/escope-3.6.0.tgz#e01975e812781a163a6dadfdd80398dc64c889c3"
- integrity sha1-4Bl16BJ4GhY6ba392AOY3GTIicM=
+ integrity sha512-75IUQsusDdalQEW/G/2esa87J7raqdJF+Ca0/Xm5C3Q58Nr4yVYjZGp/P1+2xiEVgXRrA39dpRb8LcshajbqDQ==
dependencies:
es6-map "^0.1.3"
es6-weak-map "^2.0.1"
esrecurse "^4.1.0"
estraverse "^4.1.1"
+esprima@^2.6.0:
+ version "2.7.3"
+ resolved "https://registry.yarnpkg.com/esprima/-/esprima-2.7.3.tgz#96e3b70d5779f6ad49cd032673d1c312767ba581"
+ integrity sha512-OarPfz0lFCiW4/AV2Oy1Rp9qu0iusTKqykwTspGCZtPxmF81JR4MmIebvF1F9+UOKth2ZubLQ4XGGaU+hSn99A==
+
esrecurse@^4.1.0:
version "4.3.0"
resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921"
@@ -927,7 +1094,7 @@ esutils@^2.0.2:
event-emitter@~0.3.5:
version "0.3.5"
resolved "https://registry.yarnpkg.com/event-emitter/-/event-emitter-0.3.5.tgz#df8c69eef1647923c7157b9ce83840610b02cc39"
- integrity sha1-34xp7vFkeSPHFXuc6DhAYQsCzDk=
+ integrity sha512-D9rRn9y7kLPnJ+hMq7S/nhvoKwwvVJahBi2BPmx3bvbsEdK3W9ii8cBSGjP+72/LnM4n6fo3+dkCX5FeTQruXA==
dependencies:
d "1"
es5-ext "~0.10.14"
@@ -948,7 +1115,7 @@ evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3:
execa@^0.7.0:
version "0.7.0"
resolved "https://registry.yarnpkg.com/execa/-/execa-0.7.0.tgz#944becd34cc41ee32a63a9faf27ad5a65fc59777"
- integrity sha1-lEvs00zEHuMqY6n68nrVpl/Fl3c=
+ integrity sha512-RztN09XglpYI7aBBrJCPW95jEH7YF1UEPOoX9yDhUTPdp7mK+CQvnLTuD10BNXZ3byLTu2uehZ8EcKT/4CGiFw==
dependencies:
cross-spawn "^5.0.1"
get-stream "^3.0.0"
@@ -968,7 +1135,7 @@ ext@^1.1.2:
extend@3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.0.tgz#5a474353b9f3353ddd8176dfd37b91c83a46f1d4"
- integrity sha1-WkdDU7nzNT3dgXbf03uRyDpG8dQ=
+ integrity sha512-5mYyg57hpD+sFaJmgNL9BidQ5C7dmJE3U5vzlRWbuqG+8dytvYEoxvKs6Tj5cm3LpMsFvRt20qz1ckezmsOUgQ==
fast-deep-equal@^3.1.1:
version "3.1.3"
@@ -988,7 +1155,7 @@ fastparse@^1.1.2:
find-cache-dir@^0.1.1:
version "0.1.1"
resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-0.1.1.tgz#c8defae57c8a52a8a784f9e31c57c742e993a0b9"
- integrity sha1-yN765XyKUqinhPnjHFfHQumToLk=
+ integrity sha512-Z9XSBoNE7xQiV6MSgPuCfyMokH2K7JdpRkOYE1+mu3d4BFJtx3GW+f6Bo4q8IX6rlf5MYbLBKW0pjl2cWdkm2A==
dependencies:
commondir "^1.0.1"
mkdirp "^0.5.1"
@@ -997,7 +1164,7 @@ find-cache-dir@^0.1.1:
find-cache-dir@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-1.0.0.tgz#9288e3e9e3cc3748717d39eade17cf71fc30ee6f"
- integrity sha1-kojj6ePMN0hxfTnq3hfPcfww7m8=
+ integrity sha512-46TFiBOzX7xq/PcSWfFwkyjpemdRnMe31UQF+os0y+1W3k95f6R4SEt02Hj4p3X0Mir9gfrkmOtshFidS0VPUg==
dependencies:
commondir "^1.0.1"
make-dir "^1.0.0"
@@ -1006,12 +1173,12 @@ find-cache-dir@^1.0.0:
find-root@^0.1.1:
version "0.1.2"
resolved "https://registry.yarnpkg.com/find-root/-/find-root-0.1.2.tgz#98d2267cff1916ccaf2743b3a0eea81d79d7dcd1"
- integrity sha1-mNImfP8ZFsyvJ0OzoO6oHXnX3NE=
+ integrity sha512-GyDxVgA61TZcrgDJPqOqGBpi80Uf2yIstubgizi7AjC9yPdRrqBR+Y0MvK4kXnYlaoz3d+SGxDHMYVkwI/yd2w==
find-up@^1.0.0:
version "1.1.2"
resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f"
- integrity sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8=
+ integrity sha512-jvElSjyuo4EMQGoTwo1uJU5pQMwTW5lS1x05zzfJuTIyLR3zwO27LYrxNg+dlvKpGOuGy/MzBdXh80g0ve5+HA==
dependencies:
path-exists "^2.0.0"
pinkie-promise "^2.0.0"
@@ -1019,10 +1186,15 @@ find-up@^1.0.0:
find-up@^2.0.0, find-up@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7"
- integrity sha1-RdG35QbHF93UgndaK3eSCjwMV6c=
+ integrity sha512-NWzkk0jSJtTt08+FBFMvXoeZnOJD+jTtsRmBYbAIzJdX6l7dLgR7CTubCM5/eDdPUBvLCeVasP1brfVR/9/EZQ==
dependencies:
locate-path "^2.0.0"
+flatten@^1.0.2:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/flatten/-/flatten-1.0.3.tgz#c1283ac9f27b368abc1e36d1ff7b04501a30356b"
+ integrity sha512-dVsPA/UwQ8+2uoFe5GHtiBMu48dWLTdsuEd7CKGlZlD78r1TTWBvDuFaFGKCo/ZfEr95Uk56vZoX86OsHkUeIg==
+
flush-write-stream@^1.0.0:
version "1.1.1"
resolved "https://registry.yarnpkg.com/flush-write-stream/-/flush-write-stream-1.1.1.tgz#8dd7d873a1babc207d94ead0c2e0e44276ebf2e8"
@@ -1034,7 +1206,7 @@ flush-write-stream@^1.0.0:
font-awesome-webpack@0.0.5-beta.2:
version "0.0.5-beta.2"
resolved "https://registry.yarnpkg.com/font-awesome-webpack/-/font-awesome-webpack-0.0.5-beta.2.tgz#9ea5f22f0615d08e76d8db341563649a726286d6"
- integrity sha1-nqXyLwYV0I522Ns0FWNkmnJihtY=
+ integrity sha512-er4SI8BEy7gwq360+H0/s00IqekBuyOYgr5on3wG7/Kj9SQa8D3LJ87aj8zXTTyp+qswHtbWl5gye/ZhYJdrYQ==
dependencies:
css-loader "~0.26.1"
less-loader "~2.2.3"
@@ -1043,12 +1215,12 @@ font-awesome-webpack@0.0.5-beta.2:
font-awesome@^4.5.0:
version "4.7.0"
resolved "https://registry.yarnpkg.com/font-awesome/-/font-awesome-4.7.0.tgz#8fa8cf0411a1a31afd07b06d2902bb9fc815a133"
- integrity sha1-j6jPBBGhoxr9B7BtKQK7n8gVoTM=
+ integrity sha512-U6kGnykA/6bFmg1M/oT9EkFeIYv7JlX3bozwQJWiiLz6L0w3F5vBVPxHlwyX/vtNq1ckcpRKOB9f2Qal/VtFpg==
form-data@1.0.0-rc3:
version "1.0.0-rc3"
resolved "https://registry.yarnpkg.com/form-data/-/form-data-1.0.0-rc3.tgz#d35bc62e7fbc2937ae78f948aaa0d38d90607577"
- integrity sha1-01vGLn+8KTeuePlIqqDTjZBgdXc=
+ integrity sha512-Z5JWXWsFDI8x73Rt/Dc7SK/EvKBzudhqIVBtEhcAhtoevCTqO3YJmctGBLzT0Ggg39xFcefkXt00t1TYLz6D0w==
dependencies:
async "^1.4.0"
combined-stream "^1.0.5"
@@ -1057,12 +1229,12 @@ form-data@1.0.0-rc3:
formidable@~1.0.14:
version "1.0.17"
resolved "https://registry.yarnpkg.com/formidable/-/formidable-1.0.17.tgz#ef5491490f9433b705faa77249c99029ae348559"
- integrity sha1-71SRSQ+UM7cF+qdyScmQKa40hVk=
+ integrity sha512-95MFT5qipMvUiesmuvGP1BI4hh5XWCzyTapiNJ/k8JBQda7rPy7UCWYItz2uZEdTgGNy1eInjzlL9Wx1O9fedg==
from2@^2.1.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/from2/-/from2-2.3.0.tgz#8bfb5502bde4a4d36cfdeea007fcca21d7e382af"
- integrity sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8=
+ integrity sha512-OMcX/4IC/uqEPVgGeyfN22LJk6AZrMkRZHxcHBMBvHScDGgwTm2GT2Wkgtocyd3JfZffjj2kYUDXXII0Fk9W0g==
dependencies:
inherits "^2.0.1"
readable-stream "^2.0.0"
@@ -1070,7 +1242,7 @@ from2@^2.1.0:
fs-write-stream-atomic@^1.0.8:
version "1.0.10"
resolved "https://registry.yarnpkg.com/fs-write-stream-atomic/-/fs-write-stream-atomic-1.0.10.tgz#b47df53493ef911df75731e70a9ded0189db40c9"
- integrity sha1-tH31NJPvkR33VzHnCp3tAYnbQMk=
+ integrity sha512-gehEzmPn2nAwr39eay+x3X34Ra+M2QlVUTLhkXPjWdeO8RF9kszk116avgBJM3ZyNHgHXBNx+VmPaFC36k0PzA==
dependencies:
graceful-fs "^4.1.2"
iferr "^0.1.5"
@@ -1080,7 +1252,7 @@ fs-write-stream-atomic@^1.0.8:
fs.realpath@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
- integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8=
+ integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==
function-bind@^1.1.1:
version "1.1.1"
@@ -1095,7 +1267,7 @@ get-caller-file@^1.0.1:
get-stream@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14"
- integrity sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=
+ integrity sha512-GlhdIUuVakc8SJ6kK0zAFbiGzRFzNnY4jUuEbV9UROo4Y+0Ny4fjvcZFVTeDA4odpFyOQzaw6hXukJSq/f28sQ==
glob-to-regexp@^0.4.1:
version "0.4.1"
@@ -1103,14 +1275,14 @@ glob-to-regexp@^0.4.1:
integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==
glob@^7.1.2, glob@^7.1.3:
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.0.tgz#d15535af7732e02e948f4c41628bd910293f6023"
- integrity sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==
+ version "7.2.3"
+ resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b"
+ integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==
dependencies:
fs.realpath "^1.0.0"
inflight "^1.0.4"
inherits "2"
- minimatch "^3.0.4"
+ minimatch "^3.1.1"
once "^1.3.0"
path-is-absolute "^1.0.0"
@@ -1120,28 +1292,33 @@ globals@^9.18.0:
integrity sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ==
graceful-fs@^4.1.11, graceful-fs@^4.1.2:
- version "4.2.9"
- resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.9.tgz#041b05df45755e587a24942279b9d113146e1c96"
- integrity sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ==
+ version "4.2.10"
+ resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c"
+ integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==
has-ansi@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91"
- integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=
+ integrity sha512-C8vBJ8DwUCx19vhm7urhTuUsr4/IyP6l4VzNQDv+ryHQObW3TTTp9yB68WpYgRe2bbaGuZ/se74IqFeVnMnLZg==
dependencies:
ansi-regex "^2.0.0"
+has-flag@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa"
+ integrity sha512-DyYHfIYwAJmjAjSSPKANxI8bFY9YtFrgkAfinBojQ8YJTOuOuav64tMUJv584SES4xl74PmuaevIyaLESHdTAA==
+
has-flag@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-2.0.0.tgz#e8207af1cc7b30d446cc70b734b5e8be18f88d51"
- integrity sha1-6CB68cx7MNRGzHC3NLXovhj4jVE=
+ integrity sha512-P+1n3MnwjR/Epg9BBo1KT8qbye2g2Ou4sFumihwt6I4tsUX7jnLcX4BTOSKg/B1ZrIYMN9FcEnG4x5a7NB8Eng==
has-flag@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"
- integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0=
+ integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==
-has@^1.0.3:
+has@^1.0.1, has@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796"
integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==
@@ -1168,7 +1345,7 @@ hash.js@^1.0.0, hash.js@^1.0.3:
hmac-drbg@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1"
- integrity sha1-0nRXAQJabHdabFRXk+1QL8DGSaE=
+ integrity sha512-Tti3gMqLdZfhOQY1Mzf/AanLiqh1WTiJgEj26ZuYQ9fbkLomzGchCws4FyrSd4VkpBfiNhaE1On+lOz894jvXg==
dependencies:
hash.js "^1.0.3"
minimalistic-assert "^1.0.0"
@@ -1177,7 +1354,7 @@ hmac-drbg@^1.0.1:
home-or-tmp@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/home-or-tmp/-/home-or-tmp-2.0.0.tgz#e36c3f2d2cae7d746a857e38d18d5f32a7882db8"
- integrity sha1-42w/LSyufXRqhX440Y1fMqeILbg=
+ integrity sha512-ycURW7oUxE2sNiPVw1HVEFsW+ecOpJ5zaj7eC0RlwhibhRBod20muUN8qu/gzx956YrLolVvs1MTXwKgC2rVEg==
dependencies:
os-homedir "^1.0.0"
os-tmpdir "^1.0.1"
@@ -1187,27 +1364,32 @@ hosted-git-info@^2.1.4:
resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9"
integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==
+html-comment-regex@^1.1.0:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/html-comment-regex/-/html-comment-regex-1.1.2.tgz#97d4688aeb5c81886a364faa0cad1dda14d433a7"
+ integrity sha512-P+M65QY2JQ5Y0G9KKdlDpo0zK+/OHptU5AaBwUfAIDJZk1MYf32Frm84EcOytfJE0t5JvkAnKlmjsXDnWzCJmQ==
+
https-browserify@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73"
- integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM=
+ integrity sha512-J+FkSdyD+0mA0N+81tMotaRMfSL9SGi+xpD3T6YApKsc3bGSXJlfXri3VyFOeYkfLRQisDk1W+jIFFKBeUBbBg==
-iconv-lite@^0.4.4:
- version "0.4.24"
- resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b"
- integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==
+iconv-lite@^0.6.3:
+ version "0.6.3"
+ resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501"
+ integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==
dependencies:
- safer-buffer ">= 2.1.2 < 3"
+ safer-buffer ">= 2.1.2 < 3.0.0"
icss-replace-symbols@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/icss-replace-symbols/-/icss-replace-symbols-1.1.0.tgz#06ea6f83679a7749e386cfe1fe812ae5db223ded"
- integrity sha1-Bupvg2ead0njhs/h/oEq5dsiPe0=
+ integrity sha512-chIaY3Vh2mh2Q3RGXttaDIzeiPvaVXJ+C4DAh/w3c37SKZ/U6PGMmuicR2EQQp9bKG8zLMCl7I+PtIoOOPp8Gg==
icss-utils@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-2.1.0.tgz#83f0a0ec378bf3246178b6c2ad9136f135b1c962"
- integrity sha1-g/Cg7DeL8yRheLbCrZE28TWxyWI=
+ integrity sha512-bsVoyn/1V4R1kYYjLcWLedozAM4FClZUdjE9nIr8uWY7xs78y9DATgwz2wGU7M+7z55KenmmTkN2DVJ7bqzjAA==
dependencies:
postcss "^6.0.1"
@@ -1219,22 +1401,27 @@ ieee754@^1.1.4:
iferr@^0.1.5:
version "0.1.5"
resolved "https://registry.yarnpkg.com/iferr/-/iferr-0.1.5.tgz#c60eed69e6d8fdb6b3104a1fcbca1c192dc5b501"
- integrity sha1-xg7taebY/bazEEofy8ocGS3FtQE=
+ integrity sha512-DUNFN5j7Tln0D+TxzloUjKB+CtVu6myn0JEFak6dG18mNt9YkQ6lzGCdafwofISZ1lLF3xRHJ98VKy9ynkcFaA==
image-size@~0.5.0:
version "0.5.5"
resolved "https://registry.yarnpkg.com/image-size/-/image-size-0.5.5.tgz#09dfd4ab9d20e29eb1c3e80b8990378df9e3cb9c"
- integrity sha1-Cd/Uq50g4p6xw+gLiZA3jfnjy5w=
+ integrity sha512-6TDAlDPZxUFCv+fuOkIoXT/V/f3Qbq8e37p+YOiYrUv3v9cc3/6x78VdfPgFVaB9dZYeLUfKgHRebpkm/oP2VQ==
imurmurhash@^0.1.4:
version "0.1.4"
resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea"
- integrity sha1-khi5srkoojixPcT7a21XbyMUU+o=
+ integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==
+
+indexes-of@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607"
+ integrity sha512-bup+4tap3Hympa+JBJUG7XuOsdNQ6fxt0MHyXMKuLBKn0OqsTfvUxkUrroEX1+B2VsSHvCjiIcZVxRtYa4nllA==
inflight@^1.0.4:
version "1.0.6"
resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
- integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=
+ integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==
dependencies:
once "^1.3.0"
wrappy "1"
@@ -1247,12 +1434,12 @@ inherits@2, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1,
inherits@2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.1.tgz#b17d08d326b4423e568eff719f91b0b1cbdf69f1"
- integrity sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE=
+ integrity sha512-8nWq2nLTAwd02jTqJExUYFSD/fKq6VH9Y/oG2accc/kdI0V98Bag8d5a4gi3XHz73rDWa2PvTtvcWYquKqSENA==
inherits@2.0.3:
version "2.0.3"
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
- integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=
+ integrity sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==
interpret@^1.0.0:
version "1.4.0"
@@ -1269,22 +1456,27 @@ invariant@^2.2.2:
invert-kv@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-1.0.0.tgz#104a8e4aaca6d3d8cd157a8ef8bfab2d7a3ffdb6"
- integrity sha1-EEqOSqym09jNFXqO+L+rLXo//bY=
+ integrity sha512-xgs2NH9AE66ucSq4cNG1nhSFghr5l6tdL15Pk+jl46bmmBapgoaY/AacXyaDznAqmGL99TiLSQgO/XazFSKYeQ==
+
+is-absolute-url@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-2.1.0.tgz#50530dfb84fcc9aa7dbe7852e83a37b93b9f2aa6"
+ integrity sha512-vOx7VprsKyllwjSkLV79NIhpyLfr3jAp7VaTCMXOJHu4m0Ew1CZ2fcjASwmV1jI3BWuWHB013M48eyeldk9gYg==
is-arrayish@^0.2.1:
version "0.2.1"
resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
- integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=
+ integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==
is-buffer@~1.1.6:
version "1.1.6"
resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be"
integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==
-is-core-module@^2.8.1:
- version "2.8.1"
- resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.8.1.tgz#f59fdfca701d5879d0a6b100a40aa1560ce27211"
- integrity sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA==
+is-core-module@^2.9.0:
+ version "2.9.0"
+ resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.9.0.tgz#e1c34429cd51c6dd9e09e0799e396e27b19a9c69"
+ integrity sha512-+5FPy5PnwmO3lvfMb0AsoPaBG+5KHUI0wYFXOtYPnVVVspTFUuMZNfNaNVRt3FZadstu2c8x23vykRW/NBoU6A==
dependencies:
has "^1.0.3"
@@ -1296,19 +1488,31 @@ is-finite@^1.0.0:
is-fullwidth-code-point@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb"
- integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs=
+ integrity sha512-1pqUqRjkhPJ9miNq9SwMfdvi6lBJcd6eFxvfaivQhaH3SgisfiuudvFntdKOmxuee/77l+FPjKrQjWvmPjWrRw==
dependencies:
number-is-nan "^1.0.0"
is-fullwidth-code-point@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f"
- integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=
+ integrity sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==
+
+is-plain-obj@^1.0.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e"
+ integrity sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==
is-stream@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44"
- integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ=
+ integrity sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==
+
+is-svg@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/is-svg/-/is-svg-2.1.0.tgz#cf61090da0d9efbcab8722deba6f032208dbb0e9"
+ integrity sha512-Ya1giYJUkcL/94quj0+XGcmts6cETPBW1MiFz1ReJrnDJ680F52qpAEGAEGU0nq96FRGIGPx6Yo1CyPXcOoyGw==
+ dependencies:
+ html-comment-regex "^1.1.0"
is-what@^3.14.1:
version "3.14.1"
@@ -1318,17 +1522,17 @@ is-what@^3.14.1:
isarray@0.0.1:
version "0.0.1"
resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf"
- integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=
+ integrity sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==
isarray@^1.0.0, isarray@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
- integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=
+ integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==
isexe@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
- integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=
+ integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==
jquery-datetimepicker@^2.5.21:
version "2.5.21"
@@ -1342,13 +1546,18 @@ jquery-datetimepicker@^2.5.21:
"jquery-mousewheel@>= 3.1.13":
version "3.1.13"
resolved "https://registry.yarnpkg.com/jquery-mousewheel/-/jquery-mousewheel-3.1.13.tgz#06f0335f16e353a695e7206bf50503cb523a6ee5"
- integrity sha1-BvAzXxbjU6aV5yBr9QUDy1I6buU=
+ integrity sha512-GXhSjfOPyDemM005YCEHvzrEALhKDIswtxSHSR2e4K/suHVJKJxxRCGz3skPjNxjJjQa9AVSGGlYjv1M3VLIPg==
-"jquery@>= 1.7.2", jquery@>=2.2:
+"jquery@>= 1.7.2", jquery@>=1.10:
version "3.6.0"
resolved "https://registry.yarnpkg.com/jquery/-/jquery-3.6.0.tgz#c72a09f15c1bdce142f49dbf1170bdf8adac2470"
integrity sha512-JVzAR/AjBvVt2BmYhxRCSYysDsPcssdmTFnzyLEts9qNwmjmu4JTAMYubEfwVOSwpQ1I1sKKFcxhZCI2buerfw==
+js-base64@^2.1.9:
+ version "2.6.4"
+ resolved "https://registry.yarnpkg.com/js-base64/-/js-base64-2.6.4.tgz#f4e686c5de1ea1f867dbcad3d46d969428df98c4"
+ integrity sha512-pZe//GGmwJndub7ZghVHz7vjb2LgC1m8B07Au3eYqeqv9emhESByMXxaEgkUkEqJe87oBbSniGYoQNIBklc7IQ==
+
"js-tokens@^3.0.0 || ^4.0.0":
version "4.0.0"
resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
@@ -1357,12 +1566,20 @@ jquery-datetimepicker@^2.5.21:
js-tokens@^3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b"
- integrity sha1-mGbfOVECEw449/mWvOtlRDIJwls=
+ integrity sha512-RjTcuD4xjtthQkaWH7dFlH85L+QaVtSoOyGdZ3g6HFhS9dFNDfLyqgm2NFe2X6cQpeFmt0452FJjFG5UameExg==
+
+js-yaml@~3.7.0:
+ version "3.7.0"
+ resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.7.0.tgz#5c967ddd837a9bfdca5f2de84253abe8a1c03b80"
+ integrity sha512-eIlkGty7HGmntbV6P/ZlAsoncFLGsNoM27lkTzS+oneY/EiNhj+geqD9ezg/ip+SW6Var0BJU2JtV0vEUZpWVQ==
+ dependencies:
+ argparse "^1.0.7"
+ esprima "^2.6.0"
jsesc@^1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-1.3.0.tgz#46c3fec8c1892b12b0833db9bc7622176dbab34b"
- integrity sha1-RsP+yMGJKxKwgz25vHYiF226s0s=
+ integrity sha512-Mke0DA0QjUWuJlhsE0ZPPhYiJkRap642SmI/4ztCFaUs6V2AiH1sfecc+57NgaryfAA2VR3v6O+CSjC1jZJKOA==
json-loader@^0.5.1, json-loader@^0.5.4:
version "0.5.7"
@@ -1382,7 +1599,7 @@ json3@^3.3.2:
json5@^0.5.0, json5@^0.5.1:
version "0.5.1"
resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821"
- integrity sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE=
+ integrity sha512-4xrs1aW+6N5DalkqSVA8fxh458CXvR99WU8WLKmq4v8eWAL86Xo3BVqyd3SkA9wEVjCMqyvvRRkshAdOnBp5rw==
json5@^1.0.1:
version "1.0.1"
@@ -1394,7 +1611,7 @@ json5@^1.0.1:
keen-js@^3.0.0:
version "3.5.0"
resolved "https://registry.yarnpkg.com/keen-js/-/keen-js-3.5.0.tgz#9274be9bb9ee1a92867872acb3627f5aa8147b79"
- integrity sha1-knS+m7nuGpKGeHKss2J/WqgUe3k=
+ integrity sha512-lB/JZ/KfF+EL8xoUEJgGUv7adBQiQ4Q8PLI22Lzl00GqX47UmjyWnY8GETfUML5gjNxl0uqGHx2kXu0jzHHFKQ==
dependencies:
JSON2 "^0.1.0"
browserify-versionify "1.0.3"
@@ -1417,7 +1634,7 @@ knockout@3.5.1:
lcid@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/lcid/-/lcid-1.0.0.tgz#308accafa0bc483a3867b4b6f2b9506251d1b835"
- integrity sha1-MIrMr6C8SDo4Z7S28rlQYlHRuDU=
+ integrity sha512-YiGkH6EnGrDGqLMITnGjXtGmNtjoXw9SVUzcaos8RBi7Ps0VBylkq+vOcY9QE5poLasPCR849ucFUkl0UzUyOw==
dependencies:
invert-kv "^1.0.0"
@@ -1431,14 +1648,14 @@ less-loader@^10.2.0:
less-loader@~2.2.3:
version "2.2.3"
resolved "https://registry.yarnpkg.com/less-loader/-/less-loader-2.2.3.tgz#b6d8f8139c8493df09d992a93a00734b08f84528"
- integrity sha1-ttj4E5yEk98J2ZKpOgBzSwj4RSg=
+ integrity sha512-U7lgRusyqTj1TUB6OBlmow6GigLk0n5ADuTSGblCp0nkXOk+lFq/lHTDXCHkm3WydZha2FVNZivEjCEZNFJCiw==
dependencies:
loader-utils "^0.2.5"
less@^4.1.2:
- version "4.1.2"
- resolved "https://registry.yarnpkg.com/less/-/less-4.1.2.tgz#6099ee584999750c2624b65f80145f8674e4b4b0"
- integrity sha512-EoQp/Et7OSOVu0aJknJOtlXZsnr8XE8KwuzTHOLeVSEx8pVWUICc8Q0VYRHgzyjX78nMEyC/oztWFbgyhtNfDA==
+ version "4.1.3"
+ resolved "https://registry.yarnpkg.com/less/-/less-4.1.3.tgz#175be9ddcbf9b250173e0a00b4d6920a5b770246"
+ integrity sha512-w16Xk/Ta9Hhyei0Gpz9m7VS8F28nieJaL/VyShID7cYvP6IL5oHeL6p4TXSDJqZE/lNv0oJ2pGVjJsRkfwm5FA==
dependencies:
copy-anything "^2.0.1"
parse-node-version "^1.0.1"
@@ -1449,13 +1666,13 @@ less@^4.1.2:
image-size "~0.5.0"
make-dir "^2.1.0"
mime "^1.4.1"
- needle "^2.5.2"
+ needle "^3.1.0"
source-map "~0.6.0"
load-json-file@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-2.0.0.tgz#7947e42149af80d696cbf797bcaabcfe1fe29ca8"
- integrity sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg=
+ integrity sha512-3p6ZOGNbiX4CdvEd1VcE6yi78UrGNpjHO33noGwHCnT/o2fyllJDepsm8+mFFv/DvtwFHht5HIHSyOy5a+ChVQ==
dependencies:
graceful-fs "^4.1.2"
parse-json "^2.2.0"
@@ -1470,7 +1687,7 @@ loader-runner@^2.3.0:
loader-utils@^0.2.16, loader-utils@^0.2.5:
version "0.2.17"
resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-0.2.17.tgz#f86e6374d43205a6e6c60e9196f17c0299bfb348"
- integrity sha1-+G5jdNQyBabmxg6RlvF8Apm/s0g=
+ integrity sha512-tiv66G0SmiOx+pLWMtGEkfSEejxvb6N6uRrQjfWJIT79W9GMpgKeCAmm9aVBKtd4WEgntciI8CsGqjpDoCWJug==
dependencies:
big.js "^3.1.3"
emojis-list "^2.0.0"
@@ -1489,12 +1706,27 @@ loader-utils@^1.0.2, loader-utils@^1.1.0:
locate-path@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e"
- integrity sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=
+ integrity sha512-NCI2kiDkyR7VeEKm27Kda/iQHyKJe1Bu0FlTbYp3CqJu+9IFe9bLyAjMxf5ZDDbEg+iMPzB5zYyUTSm8wVTKmA==
dependencies:
p-locate "^2.0.0"
path-exists "^3.0.0"
-lodash@^4.17.11, lodash@^4.17.14, lodash@^4.17.4:
+lodash.camelcase@^4.3.0:
+ version "4.3.0"
+ resolved "https://registry.yarnpkg.com/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz#b28aa6288a2b9fc651035c7711f65ab6190331a6"
+ integrity sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==
+
+lodash.memoize@^4.1.2:
+ version "4.1.2"
+ resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe"
+ integrity sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==
+
+lodash.uniq@^4.5.0:
+ version "4.5.0"
+ resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773"
+ integrity sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==
+
+lodash@^4.17.14, lodash@^4.17.4:
version "4.17.21"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
@@ -1529,6 +1761,11 @@ make-dir@^2.1.0:
pify "^4.0.1"
semver "^5.6.0"
+math-expression-evaluator@^1.2.14:
+ version "1.3.14"
+ resolved "https://registry.yarnpkg.com/math-expression-evaluator/-/math-expression-evaluator-1.3.14.tgz#0ebeaccf65fea0f6f5a626f88df41814e5fcd9bf"
+ integrity sha512-M6AMrvq9bO8uL42KvQHPA2/SbAobA0R7gviUmPrcTcGfdwpaLitz4q2Euzx2lP9Oy88vxK3HOrsISgSwKsYS4A==
+
md5.js@^1.3.4:
version "1.3.5"
resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f"
@@ -1550,14 +1787,14 @@ md5@^2.2.1:
mem@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/mem/-/mem-1.1.0.tgz#5edd52b485ca1d900fe64895505399a0dfa45f76"
- integrity sha1-Xt1StIXKHZAP5kiVUFOZoN+kX3Y=
+ integrity sha512-nOBDrc/wgpkd3X/JOhMqYR+/eLqlfLP4oQfoBA6QExIxEl+GU01oyEkwWyueyO8110pUKijtiHGhEmYoOn88oQ==
dependencies:
mimic-fn "^1.0.0"
memory-fs@^0.4.0, memory-fs@~0.4.1:
version "0.4.1"
resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.4.1.tgz#3a9a20b8462523e447cfbc7e8bb80ed667bfc552"
- integrity sha1-OpoguEYlI+RHz7x+i7gO1me/xVI=
+ integrity sha512-cda4JKCxReDXFXRqOHPQscuIYg1PvxbE2S2GP45rnwfEK+vZaXC8C1OFvdHIbgw0DLzowXGVoxLaAmlgRy14GQ==
dependencies:
errno "^0.1.3"
readable-stream "^2.0.1"
@@ -1565,7 +1802,7 @@ memory-fs@^0.4.0, memory-fs@~0.4.1:
methods@~1.1.1:
version "1.1.2"
resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee"
- integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=
+ integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==
miller-rabin@^4.0.0:
version "4.0.1"
@@ -1575,22 +1812,22 @@ miller-rabin@^4.0.0:
bn.js "^4.0.0"
brorand "^1.0.1"
-mime-db@1.51.0:
- version "1.51.0"
- resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.51.0.tgz#d9ff62451859b18342d960850dc3cfb77e63fb0c"
- integrity sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==
+mime-db@1.52.0:
+ version "1.52.0"
+ resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70"
+ integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==
mime-types@^2.1.3:
- version "2.1.34"
- resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.34.tgz#5a712f9ec1503511a945803640fafe09d3793c24"
- integrity sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==
+ version "2.1.35"
+ resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a"
+ integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==
dependencies:
- mime-db "1.51.0"
+ mime-db "1.52.0"
mime@1.3.4:
version "1.3.4"
resolved "https://registry.yarnpkg.com/mime/-/mime-1.3.4.tgz#115f9e3b6b3daf2959983cb38f149a2d40eb5d53"
- integrity sha1-EV+eO2s9rylZmDyzjxSaLUDrXVM=
+ integrity sha512-sAaYXszED5ALBt665F0wMQCUXpGuZsGdopoqcHPdL39ZYdi7uHoZlhrfZfhv8WzivhBzr/oXwaj+yiK5wY8MXQ==
mime@^1.4.1:
version "1.6.0"
@@ -1610,19 +1847,19 @@ minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1:
minimalistic-crypto-utils@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a"
- integrity sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo=
+ integrity sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg==
-minimatch@^3.0.4:
- version "3.0.5"
- resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.5.tgz#4da8f1290ee0f0f8e83d60ca69f8f134068604a3"
- integrity sha512-tUpxzX0VAzJHjLu0xUfFv1gwVp9ba3IOuRAVH2EGuRW8a5emA2FlACLqiT/lDVtS1W+TGNwqz3sWaNyLgDJWuw==
+minimatch@^3.0.4, minimatch@^3.1.1:
+ version "3.1.2"
+ resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b"
+ integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==
dependencies:
brace-expansion "^1.1.7"
-minimist@^1.2.0, minimist@^1.2.5:
- version "1.2.5"
- resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602"
- integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==
+minimist@^1.2.0, minimist@^1.2.6:
+ version "1.2.6"
+ resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44"
+ integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==
mississippi@^2.0.0:
version "2.0.0"
@@ -1640,17 +1877,17 @@ mississippi@^2.0.0:
stream-each "^1.1.0"
through2 "^2.0.0"
-mkdirp@^0.5.1, mkdirp@~0.5.0:
- version "0.5.5"
- resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def"
- integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==
+mkdirp@^0.5.1, mkdirp@~0.5.0, mkdirp@~0.5.1:
+ version "0.5.6"
+ resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6"
+ integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==
dependencies:
- minimist "^1.2.5"
+ minimist "^1.2.6"
move-concurrently@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/move-concurrently/-/move-concurrently-1.0.1.tgz#be2c005fda32e0b29af1f05d7c4b33214c701f92"
- integrity sha1-viwAX9oy4LKa8fBdfEszIUxwH5I=
+ integrity sha512-hdrFxZOycD/g6A6SoI2bB5NA/5NEqD0569+S47WZhPvm46sD50ZHdYaFmnua5lndde9rCHGjmfK7Z8BuCt/PcQ==
dependencies:
aproba "^1.1.1"
copy-concurrently "^1.0.0"
@@ -1662,26 +1899,26 @@ move-concurrently@^1.0.1:
ms@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
- integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=
+ integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==
ms@^2.1.1:
version "2.1.3"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
-needle@^2.5.2:
- version "2.9.1"
- resolved "https://registry.yarnpkg.com/needle/-/needle-2.9.1.tgz#22d1dffbe3490c2b83e301f7709b6736cd8f2684"
- integrity sha512-6R9fqJ5Zcmf+uYaFgdIHmLwNldn5HbK8L5ybn7Uz+ylX/rnOsSp1AHcvQSrCaFN+qNM1wpymHqD7mVasEOlHGQ==
+needle@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/needle/-/needle-3.1.0.tgz#3bf5cd090c28eb15644181ab6699e027bd6c53c9"
+ integrity sha512-gCE9weDhjVGCRqS8dwDR/D3GTAeyXLXuqp7I8EzH6DllZGXSUyxuqqLh+YX9rMAWaaTFyVAg6rHGL25dqvczKw==
dependencies:
debug "^3.2.6"
- iconv-lite "^0.4.4"
+ iconv-lite "^0.6.3"
sax "^1.2.4"
-next-tick@~1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.0.0.tgz#ca86d1fe8828169b0120208e3dc8424b9db8342c"
- integrity sha1-yobR/ogoFpsBICCOPchCS524NCw=
+next-tick@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.1.0.tgz#1836ee30ad56d67ef281b22bd199f709449b35eb"
+ integrity sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ==
node-libs-browser@^2.0.0:
version "2.2.1"
@@ -1722,39 +1959,59 @@ normalize-package-data@^2.3.2:
semver "2 || 3 || 4 || 5"
validate-npm-package-license "^3.0.1"
+normalize-range@^0.1.2:
+ version "0.1.2"
+ resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942"
+ integrity sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==
+
+normalize-url@^1.4.0:
+ version "1.9.1"
+ resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-1.9.1.tgz#2cc0d66b31ea23036458436e3620d85954c66c3c"
+ integrity sha512-A48My/mtCklowHBlI8Fq2jFWK4tX4lJ5E6ytFsSOq1fzpvT0SQSgKhSg7lN5c2uYFOrUAOQp6zhhJnpp1eMloQ==
+ dependencies:
+ object-assign "^4.0.1"
+ prepend-http "^1.0.0"
+ query-string "^4.1.0"
+ sort-keys "^1.0.0"
+
npm-run-path@^2.0.0:
version "2.0.2"
resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f"
- integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=
+ integrity sha512-lJxZYlT4DW/bRUtFh1MQIWqmLwQfAxnqWG4HhEdjMlkrJYnJn0Jrr2u3mgxqaWsdiBc76TYkTG/mhrnYTuzfHw==
dependencies:
path-key "^2.0.0"
+num2fraction@^1.2.2:
+ version "1.2.2"
+ resolved "https://registry.yarnpkg.com/num2fraction/-/num2fraction-1.2.2.tgz#6f682b6a027a4e9ddfa4564cd2589d1d4e669ede"
+ integrity sha512-Y1wZESM7VUThYY+4W+X4ySH2maqcA+p7UR+w8VWNWVAd6lwuXXWz/w/Cz43J/dI2I+PS6wD5N+bJUF+gjWvIqg==
+
number-is-nan@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d"
- integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=
+ integrity sha512-4jbtZXNAsfZbAHiiqjLPBiCl16dES1zI4Hpzzxw61Tk+loF+sBDBKx1ICKKKwIqQ7M0mFn1TmkN7euSncWgHiQ==
-object-assign@^4.0.1, object-assign@^4.1.1:
+object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1:
version "4.1.1"
resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863"
- integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=
+ integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==
once@^1.3.0, once@^1.3.1, once@^1.4.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
- integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E=
+ integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==
dependencies:
wrappy "1"
os-browserify@^0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.3.0.tgz#854373c7f5c2315914fc9bfc6bd8238fdda1ec27"
- integrity sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc=
+ integrity sha512-gjcpUc3clBf9+210TRaDWbf+rZZZEshZ+DlXMRCeAjp0xhTrnQsKHypIy1J3d5hKdUzj69t708EHtU8P6bUn0A==
os-homedir@^1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3"
- integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M=
+ integrity sha512-B5JU3cabzk8c67mRRd3ECmROafjYMXbuzlwtqdM8IbS8ktlTix8aFGb2bAGKrSRIlnfKwovGUUr72JUPyOb6kQ==
os-locale@^2.0.0:
version "2.1.0"
@@ -1768,12 +2025,12 @@ os-locale@^2.0.0:
os-tmpdir@^1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274"
- integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=
+ integrity sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==
p-finally@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae"
- integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=
+ integrity sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==
p-limit@^1.1.0:
version "1.3.0"
@@ -1785,14 +2042,14 @@ p-limit@^1.1.0:
p-locate@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43"
- integrity sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=
+ integrity sha512-nQja7m7gSKuewoVRen45CtVfODR3crN3goVQ0DDZ9N3yHxgpkuBhZqsaiotSQRrADUrne346peY7kT3TSACykg==
dependencies:
p-limit "^1.1.0"
p-try@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3"
- integrity sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=
+ integrity sha512-U1etNYuMJoIz3ZXSrrySFjsXQTWOx2/jdi86L+2pRvph/qMKL6sbcCYdH23fqsbm8TH2Gn0OybpT4eSFlCVHww==
pako@~1.0.5:
version "1.0.11"
@@ -1822,7 +2079,7 @@ parse-asn1@^5.0.0, parse-asn1@^5.1.5:
parse-json@^2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9"
- integrity sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=
+ integrity sha512-QR/GGaKCkhwk1ePQNYDRKYZ3mwU9ypsKhB0XyFnLQdomyEqk3e8wpW3V5Jp88zbxK4n5ST1nqo+g9juTpownhQ==
dependencies:
error-ex "^1.2.0"
@@ -1839,24 +2096,24 @@ path-browserify@0.0.1:
path-exists@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b"
- integrity sha1-D+tsZPD8UY2adU3V77YscCJ2H0s=
+ integrity sha512-yTltuKuhtNeFJKa1PiRzfLAU5182q1y4Eb4XCJ3PBqyzEDkAZRzBrKKBct682ls9reBVHf9udYLN5Nd+K1B9BQ==
dependencies:
pinkie-promise "^2.0.0"
path-exists@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515"
- integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=
+ integrity sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==
path-is-absolute@^1.0.0, path-is-absolute@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
- integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18=
+ integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==
path-key@^2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40"
- integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=
+ integrity sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==
path-parse@^1.0.7:
version "1.0.7"
@@ -1866,7 +2123,7 @@ path-parse@^1.0.7:
path-type@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/path-type/-/path-type-2.0.0.tgz#f012ccb8415b7096fc2daa1054c3d72389594c73"
- integrity sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=
+ integrity sha512-dUnb5dXUf+kzhC/W/F4e5/SkluXIFf5VUHolW1Eg1irn1hGWjPGdsRcvYJ1nD6lhk8Ir7VM0bHJKsYTx8Jx9OQ==
dependencies:
pify "^2.0.0"
@@ -1889,12 +2146,12 @@ php-date-formatter@^1.3.4:
pify@^2.0.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c"
- integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw=
+ integrity sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==
pify@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176"
- integrity sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=
+ integrity sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==
pify@^4.0.1:
version "4.0.1"
@@ -1904,33 +2161,166 @@ pify@^4.0.1:
pinkie-promise@^2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa"
- integrity sha1-ITXW36ejWMBprJsXh3YogihFD/o=
+ integrity sha512-0Gni6D4UcLTbv9c57DfxDGdr41XfgUjqWZu492f0cIGr16zDU06BWP/RAEvOuo7CQ0CNjHaLlM59YJJFm3NWlw==
dependencies:
pinkie "^2.0.0"
pinkie@^2.0.0:
version "2.0.4"
resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870"
- integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA=
+ integrity sha512-MnUuEycAemtSaeFSjXKW/aroV7akBbY+Sv+RkyqFjgAe73F+MR0TBWKBRDkmfWq/HiFmdavfZ1G7h4SPZXaCSg==
pkg-dir@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-1.0.0.tgz#7a4b508a8d5bb2d629d447056ff4e9c9314cf3d4"
- integrity sha1-ektQio1bstYp1EcFb/TpyTFM89Q=
+ integrity sha512-c6pv3OE78mcZ92ckebVDqg0aWSoKhOTbwCV6qbCWMk546mAL9pZln0+QsN/yQ7fkucd4+yJPLrCBXNt8Ruk+Eg==
dependencies:
find-up "^1.0.0"
pkg-dir@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-2.0.0.tgz#f6d5d1109e19d63edf428e0bd57e12777615334b"
- integrity sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=
+ integrity sha512-ojakdnUgL5pzJYWw2AIDEupaQCX5OPbM688ZevubICjdIX01PRSYKqm33fJoCOJBRseYCTUlQRnBNX+Pchaejw==
dependencies:
find-up "^2.1.0"
-popper.js@>=1.10:
- version "1.16.1"
- resolved "https://registry.yarnpkg.com/popper.js/-/popper.js-1.16.1.tgz#2a223cb3dc7b6213d740e40372be40de43e65b1b"
- integrity sha512-Wb4p1J4zyFTbM+u6WuO4XstYx4Ky9Cewe4DWrel7B0w6VVICvPwdOpotjzcf6eD8TsckVnIMNONQyPIUFOUbCQ==
+postcss-calc@^5.2.0:
+ version "5.3.1"
+ resolved "https://registry.yarnpkg.com/postcss-calc/-/postcss-calc-5.3.1.tgz#77bae7ca928ad85716e2fda42f261bf7c1d65b5e"
+ integrity sha512-iBcptYFq+QUh9gzP7ta2btw50o40s4uLI4UDVgd5yRAZtUDWc5APdl5yQDd2h/TyiZNbJrv0HiYhT102CMgN7Q==
+ dependencies:
+ postcss "^5.0.2"
+ postcss-message-helpers "^2.0.0"
+ reduce-css-calc "^1.2.6"
+
+postcss-colormin@^2.1.8:
+ version "2.2.2"
+ resolved "https://registry.yarnpkg.com/postcss-colormin/-/postcss-colormin-2.2.2.tgz#6631417d5f0e909a3d7ec26b24c8a8d1e4f96e4b"
+ integrity sha512-XXitQe+jNNPf+vxvQXIQ1+pvdQKWKgkx8zlJNltcMEmLma1ypDRDQwlLt+6cP26fBreihNhZxohh1rcgCH2W5w==
+ dependencies:
+ colormin "^1.0.5"
+ postcss "^5.0.13"
+ postcss-value-parser "^3.2.3"
+
+postcss-convert-values@^2.3.4:
+ version "2.6.1"
+ resolved "https://registry.yarnpkg.com/postcss-convert-values/-/postcss-convert-values-2.6.1.tgz#bbd8593c5c1fd2e3d1c322bb925dcae8dae4d62d"
+ integrity sha512-SE7mf25D3ORUEXpu3WUqQqy0nCbMuM5BEny+ULE/FXdS/0UMA58OdzwvzuHJRpIFlk1uojt16JhaEogtP6W2oA==
+ dependencies:
+ postcss "^5.0.11"
+ postcss-value-parser "^3.1.2"
+
+postcss-discard-comments@^2.0.4:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/postcss-discard-comments/-/postcss-discard-comments-2.0.4.tgz#befe89fafd5b3dace5ccce51b76b81514be00e3d"
+ integrity sha512-yGbyBDo5FxsImE90LD8C87vgnNlweQkODMkUZlDVM/CBgLr9C5RasLGJxxh9GjVOBeG8NcCMatoqI1pXg8JNXg==
+ dependencies:
+ postcss "^5.0.14"
+
+postcss-discard-duplicates@^2.0.1:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/postcss-discard-duplicates/-/postcss-discard-duplicates-2.1.0.tgz#b9abf27b88ac188158a5eb12abcae20263b91932"
+ integrity sha512-+lk5W1uqO8qIUTET+UETgj9GWykLC3LOldr7EehmymV0Wu36kyoHimC4cILrAAYpHQ+fr4ypKcWcVNaGzm0reA==
+ dependencies:
+ postcss "^5.0.4"
+
+postcss-discard-empty@^2.0.1:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/postcss-discard-empty/-/postcss-discard-empty-2.1.0.tgz#d2b4bd9d5ced5ebd8dcade7640c7d7cd7f4f92b5"
+ integrity sha512-IBFoyrwk52dhF+5z/ZAbzq5Jy7Wq0aLUsOn69JNS+7YeuyHaNzJwBIYE0QlUH/p5d3L+OON72Fsexyb7OK/3og==
+ dependencies:
+ postcss "^5.0.14"
+
+postcss-discard-overridden@^0.1.1:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/postcss-discard-overridden/-/postcss-discard-overridden-0.1.1.tgz#8b1eaf554f686fb288cd874c55667b0aa3668d58"
+ integrity sha512-IyKoDL8QNObOiUc6eBw8kMxBHCfxUaERYTUe2QF8k7j/xiirayDzzkmlR6lMQjrAM1p1DDRTvWrS7Aa8lp6/uA==
+ dependencies:
+ postcss "^5.0.16"
+
+postcss-discard-unused@^2.2.1:
+ version "2.2.3"
+ resolved "https://registry.yarnpkg.com/postcss-discard-unused/-/postcss-discard-unused-2.2.3.tgz#bce30b2cc591ffc634322b5fb3464b6d934f4433"
+ integrity sha512-nCbFNfqYAbKCw9J6PSJubpN9asnrwVLkRDFc4KCwyUEdOtM5XDE/eTW3OpqHrYY1L4fZxgan7LLRAAYYBzwzrg==
+ dependencies:
+ postcss "^5.0.14"
+ uniqs "^2.0.0"
+
+postcss-filter-plugins@^2.0.0:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/postcss-filter-plugins/-/postcss-filter-plugins-2.0.3.tgz#82245fdf82337041645e477114d8e593aa18b8ec"
+ integrity sha512-T53GVFsdinJhgwm7rg1BzbeBRomOg9y5MBVhGcsV0CxurUdVj1UlPdKtn7aqYA/c/QVkzKMjq2bSV5dKG5+AwQ==
+ dependencies:
+ postcss "^5.0.4"
+
+postcss-merge-idents@^2.1.5:
+ version "2.1.7"
+ resolved "https://registry.yarnpkg.com/postcss-merge-idents/-/postcss-merge-idents-2.1.7.tgz#4c5530313c08e1d5b3bbf3d2bbc747e278eea270"
+ integrity sha512-9DHmfCZ7/hNHhIKnNkz4CU0ejtGen5BbTRJc13Z2uHfCedeCUsK2WEQoAJRBL+phs68iWK6Qf8Jze71anuysWA==
+ dependencies:
+ has "^1.0.1"
+ postcss "^5.0.10"
+ postcss-value-parser "^3.1.1"
+
+postcss-merge-longhand@^2.0.1:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-merge-longhand/-/postcss-merge-longhand-2.0.2.tgz#23d90cd127b0a77994915332739034a1a4f3d658"
+ integrity sha512-ma7YvxjdLQdifnc1HFsW/AW6fVfubGyR+X4bE3FOSdBVMY9bZjKVdklHT+odknKBB7FSCfKIHC3yHK7RUAqRPg==
+ dependencies:
+ postcss "^5.0.4"
+
+postcss-merge-rules@^2.0.3:
+ version "2.1.2"
+ resolved "https://registry.yarnpkg.com/postcss-merge-rules/-/postcss-merge-rules-2.1.2.tgz#d1df5dfaa7b1acc3be553f0e9e10e87c61b5f721"
+ integrity sha512-Wgg2FS6W3AYBl+5L9poL6ZUISi5YzL+sDCJfM7zNw/Q1qsyVQXXZ2cbVui6mu2cYJpt1hOKCGj1xA4mq/obz/Q==
+ dependencies:
+ browserslist "^1.5.2"
+ caniuse-api "^1.5.2"
+ postcss "^5.0.4"
+ postcss-selector-parser "^2.2.2"
+ vendors "^1.0.0"
+
+postcss-message-helpers@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-message-helpers/-/postcss-message-helpers-2.0.0.tgz#a4f2f4fab6e4fe002f0aed000478cdf52f9ba60e"
+ integrity sha512-tPLZzVAiIJp46TBbpXtrUAKqedXSyW5xDEo1sikrfEfnTs+49SBZR/xDdqCiJvSSbtr615xDsaMF3RrxS2jZlA==
+
+postcss-minify-font-values@^1.0.2:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/postcss-minify-font-values/-/postcss-minify-font-values-1.0.5.tgz#4b58edb56641eba7c8474ab3526cafd7bbdecb69"
+ integrity sha512-vFSPzrJhNe6/8McOLU13XIsERohBJiIFFuC1PolgajOZdRWqRgKITP/A4Z/n4GQhEmtbxmO9NDw3QLaFfE1dFQ==
+ dependencies:
+ object-assign "^4.0.1"
+ postcss "^5.0.4"
+ postcss-value-parser "^3.0.2"
+
+postcss-minify-gradients@^1.0.1:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/postcss-minify-gradients/-/postcss-minify-gradients-1.0.5.tgz#5dbda11373703f83cfb4a3ea3881d8d75ff5e6e1"
+ integrity sha512-DZhT0OE+RbVqVyGsTIKx84rU/5cury1jmwPa19bViqYPQu499ZU831yMzzsyC8EhiZVd73+h5Z9xb/DdaBpw7Q==
+ dependencies:
+ postcss "^5.0.12"
+ postcss-value-parser "^3.3.0"
+
+postcss-minify-params@^1.0.4:
+ version "1.2.2"
+ resolved "https://registry.yarnpkg.com/postcss-minify-params/-/postcss-minify-params-1.2.2.tgz#ad2ce071373b943b3d930a3fa59a358c28d6f1f3"
+ integrity sha512-hhJdMVgP8vasrHbkKAk+ab28vEmPYgyuDzRl31V3BEB3QOR3L5TTIVEWLDNnZZ3+fiTi9d6Ker8GM8S1h8p2Ow==
+ dependencies:
+ alphanum-sort "^1.0.1"
+ postcss "^5.0.2"
+ postcss-value-parser "^3.0.2"
+ uniqs "^2.0.0"
+
+postcss-minify-selectors@^2.0.4:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/postcss-minify-selectors/-/postcss-minify-selectors-2.1.1.tgz#b2c6a98c0072cf91b932d1a496508114311735bf"
+ integrity sha512-e13vxPBSo3ZaPne43KVgM+UETkx3Bs4/Qvm6yXI9HQpQp4nyb7HZ0gKpkF+Wn2x+/dbQ+swNpCdZSbMOT7+TIA==
+ dependencies:
+ alphanum-sort "^1.0.2"
+ has "^1.0.1"
+ postcss "^5.0.14"
+ postcss-selector-parser "^2.0.0"
postcss-modules-extract-imports@^1.2.0:
version "1.2.1"
@@ -1942,7 +2332,7 @@ postcss-modules-extract-imports@^1.2.0:
postcss-modules-local-by-default@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-1.2.0.tgz#f7d80c398c5a393fa7964466bd19500a7d61c069"
- integrity sha1-99gMOYxaOT+nlkRmvRlQCn1hwGk=
+ integrity sha512-X4cquUPIaAd86raVrBwO8fwRfkIdbwFu7CTfEOjiZQHVQwlHRSkTgH5NLDmMm5+1hQO8u6dZ+TOOJDbay1hYpA==
dependencies:
css-selector-tokenizer "^0.7.0"
postcss "^6.0.1"
@@ -1950,7 +2340,7 @@ postcss-modules-local-by-default@^1.2.0:
postcss-modules-scope@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-1.1.0.tgz#d6ea64994c79f97b62a72b426fbe6056a194bb90"
- integrity sha1-1upkmUx5+XtipytCb75gVqGUu5A=
+ integrity sha512-LTYwnA4C1He1BKZXIx1CYiHixdSe9LWYVKadq9lK5aCCMkoOkFyZ7aigt+srfjlRplJY3gIol6KUNefdMQJdlw==
dependencies:
css-selector-tokenizer "^0.7.0"
postcss "^6.0.1"
@@ -1958,17 +2348,113 @@ postcss-modules-scope@^1.1.0:
postcss-modules-values@^1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-1.3.0.tgz#ecffa9d7e192518389f42ad0e83f72aec456ea20"
- integrity sha1-7P+p1+GSUYOJ9CrQ6D9yrsRW6iA=
+ integrity sha512-i7IFaR9hlQ6/0UgFuqM6YWaCfA1Ej8WMg8A5DggnH1UGKJvTV/ugqq/KaULixzzOi3T/tF6ClBXcHGCzdd5unA==
dependencies:
icss-replace-symbols "^1.1.0"
postcss "^6.0.1"
-postcss-value-parser@^3.3.0:
+postcss-normalize-charset@^1.1.0:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/postcss-normalize-charset/-/postcss-normalize-charset-1.1.1.tgz#ef9ee71212d7fe759c78ed162f61ed62b5cb93f1"
+ integrity sha512-RKgjEks83l8w4yEhztOwNZ+nLSrJ+NvPNhpS+mVDzoaiRHZQVoG7NF2TP5qjwnaN9YswUhj6m1E0S0Z+WDCgEQ==
+ dependencies:
+ postcss "^5.0.5"
+
+postcss-normalize-url@^3.0.7:
+ version "3.0.8"
+ resolved "https://registry.yarnpkg.com/postcss-normalize-url/-/postcss-normalize-url-3.0.8.tgz#108f74b3f2fcdaf891a2ffa3ea4592279fc78222"
+ integrity sha512-WqtWG6GV2nELsQEFES0RzfL2ebVwmGl/M8VmMbshKto/UClBo+mznX8Zi4/hkThdqx7ijwv+O8HWPdpK7nH/Ig==
+ dependencies:
+ is-absolute-url "^2.0.0"
+ normalize-url "^1.4.0"
+ postcss "^5.0.14"
+ postcss-value-parser "^3.2.3"
+
+postcss-ordered-values@^2.1.0:
+ version "2.2.3"
+ resolved "https://registry.yarnpkg.com/postcss-ordered-values/-/postcss-ordered-values-2.2.3.tgz#eec6c2a67b6c412a8db2042e77fe8da43f95c11d"
+ integrity sha512-5RB1IUZhkxDCfa5fx/ogp/A82mtq+r7USqS+7zt0e428HJ7+BHCxyeY39ClmkkUtxdOd3mk8gD6d9bjH2BECMg==
+ dependencies:
+ postcss "^5.0.4"
+ postcss-value-parser "^3.0.1"
+
+postcss-reduce-idents@^2.2.2:
+ version "2.4.0"
+ resolved "https://registry.yarnpkg.com/postcss-reduce-idents/-/postcss-reduce-idents-2.4.0.tgz#c2c6d20cc958284f6abfbe63f7609bf409059ad3"
+ integrity sha512-0+Ow9e8JLtffjumJJFPqvN4qAvokVbdQPnijUDSOX8tfTwrILLP4ETvrZcXZxAtpFLh/U0c+q8oRMJLr1Kiu4w==
+ dependencies:
+ postcss "^5.0.4"
+ postcss-value-parser "^3.0.2"
+
+postcss-reduce-initial@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-reduce-initial/-/postcss-reduce-initial-1.0.1.tgz#68f80695f045d08263a879ad240df8dd64f644ea"
+ integrity sha512-jJFrV1vWOPCQsIVitawGesRgMgunbclERQ/IRGW7r93uHrVzNQQmHQ7znsOIjJPZ4yWMzs5A8NFhp3AkPHPbDA==
+ dependencies:
+ postcss "^5.0.4"
+
+postcss-reduce-transforms@^1.0.3:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/postcss-reduce-transforms/-/postcss-reduce-transforms-1.0.4.tgz#ff76f4d8212437b31c298a42d2e1444025771ae1"
+ integrity sha512-lGgRqnSuAR5i5uUg1TA33r9UngfTadWxOyL2qx1KuPoCQzfmtaHjp9PuwX7yVyRxG3BWBzeFUaS5uV9eVgnEgQ==
+ dependencies:
+ has "^1.0.1"
+ postcss "^5.0.8"
+ postcss-value-parser "^3.0.1"
+
+postcss-selector-parser@^2.0.0, postcss-selector-parser@^2.2.2:
+ version "2.2.3"
+ resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-2.2.3.tgz#f9437788606c3c9acee16ffe8d8b16297f27bb90"
+ integrity sha512-3pqyakeGhrO0BQ5+/tGTfvi5IAUAhHRayGK8WFSu06aEv2BmHoXw/Mhb+w7VY5HERIuC+QoUI7wgrCcq2hqCVA==
+ dependencies:
+ flatten "^1.0.2"
+ indexes-of "^1.0.1"
+ uniq "^1.0.1"
+
+postcss-svgo@^2.1.1:
+ version "2.1.6"
+ resolved "https://registry.yarnpkg.com/postcss-svgo/-/postcss-svgo-2.1.6.tgz#b6df18aa613b666e133f08adb5219c2684ac108d"
+ integrity sha512-y5AdQdgBoF4rbpdbeWAJuxE953g/ylRfVNp6mvAi61VCN/Y25Tu9p5mh3CyI42WbTRIiwR9a1GdFtmDnNPeskQ==
+ dependencies:
+ is-svg "^2.0.0"
+ postcss "^5.0.14"
+ postcss-value-parser "^3.2.3"
+ svgo "^0.7.0"
+
+postcss-unique-selectors@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-unique-selectors/-/postcss-unique-selectors-2.0.2.tgz#981d57d29ddcb33e7b1dfe1fd43b8649f933ca1d"
+ integrity sha512-WZX8r1M0+IyljoJOJleg3kYm10hxNYF9scqAT7v/xeSX1IdehutOM85SNO0gP9K+bgs86XERr7Ud5u3ch4+D8g==
+ dependencies:
+ alphanum-sort "^1.0.1"
+ postcss "^5.0.4"
+ uniqs "^2.0.0"
+
+postcss-value-parser@^3.0.1, postcss-value-parser@^3.0.2, postcss-value-parser@^3.1.1, postcss-value-parser@^3.1.2, postcss-value-parser@^3.2.3, postcss-value-parser@^3.3.0:
version "3.3.1"
resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz#9ff822547e2893213cf1c30efa51ac5fd1ba8281"
integrity sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==
-postcss@^6.0.1, postcss@^6.0.23:
+postcss-zindex@^2.0.1:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/postcss-zindex/-/postcss-zindex-2.2.0.tgz#d2109ddc055b91af67fc4cb3b025946639d2af22"
+ integrity sha512-uhRZ2hRgj0lorxm9cr62B01YzpUe63h0RXMXQ4gWW3oa2rpJh+FJAiEAytaFCPU/VgaBS+uW2SJ1XKyDNz1h4w==
+ dependencies:
+ has "^1.0.1"
+ postcss "^5.0.4"
+ uniqs "^2.0.0"
+
+postcss@^5.0.10, postcss@^5.0.11, postcss@^5.0.12, postcss@^5.0.13, postcss@^5.0.14, postcss@^5.0.16, postcss@^5.0.2, postcss@^5.0.4, postcss@^5.0.5, postcss@^5.0.6, postcss@^5.0.8, postcss@^5.2.16:
+ version "5.2.18"
+ resolved "https://registry.yarnpkg.com/postcss/-/postcss-5.2.18.tgz#badfa1497d46244f6390f58b319830d9107853c5"
+ integrity sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==
+ dependencies:
+ chalk "^1.1.3"
+ js-base64 "^2.1.9"
+ source-map "^0.5.6"
+ supports-color "^3.2.3"
+
+postcss@^6.0.1:
version "6.0.23"
resolved "https://registry.yarnpkg.com/postcss/-/postcss-6.0.23.tgz#61c82cc328ac60e677645f979054eb98bc0e3324"
integrity sha512-soOk1h6J3VMTZtVeVpv15/Hpdl2cBLX3CAw4TAbkpTJiNPk9YP/zWcD1ND+xEtvyuuvKzbxliTOIyvkSeSJ6ag==
@@ -1977,6 +2463,11 @@ postcss@^6.0.1, postcss@^6.0.23:
source-map "^0.6.1"
supports-color "^5.4.0"
+prepend-http@^1.0.0:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-1.0.4.tgz#d4f4562b0ce3696e41ac52d0e002e57a635dc6dc"
+ integrity sha512-PhmXi5XmoyKw1Un4E+opM2KcsJInDvKyuOumcjjw3waw86ZNjHwVUOOWLc4bCzLdcKNaWBH9e99sbWzDQsVaYg==
+
private@^0.1.8:
version "0.1.8"
resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff"
@@ -1990,22 +2481,22 @@ process-nextick-args@~2.0.0:
process@^0.11.10:
version "0.11.10"
resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182"
- integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI=
+ integrity sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==
promise-inflight@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3"
- integrity sha1-mEcocL8igTL8vdhoEputEsPAKeM=
+ integrity sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==
prr@~1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476"
- integrity sha1-0/wRS6BplaRexok/SEzrHXj19HY=
+ integrity sha512-yPw4Sng1gWghHQWj0B3ZggWUm4qVbPwPFcRG8KyxiU7J2OHFSoEHKS+EZ3fv5l1t9CyCiop6l/ZYeWbrgoQejw==
pseudomap@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3"
- integrity sha1-8FKijacOYYkX7wqKw0wa5aaChrM=
+ integrity sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ==
public-encrypt@^4.0.0:
version "4.0.3"
@@ -2039,32 +2530,45 @@ pumpify@^1.3.3:
punycode@1.3.2:
version "1.3.2"
resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d"
- integrity sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=
+ integrity sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw==
punycode@^1.2.4:
version "1.4.1"
resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e"
- integrity sha1-wNWmOycYgArY4esPpSachN1BhF4=
+ integrity sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==
punycode@^2.1.0:
version "2.1.1"
resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec"
integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==
+q@^1.1.2:
+ version "1.5.1"
+ resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7"
+ integrity sha512-kV/CThkXo6xyFEZUugw/+pIOywXcDbFYgSct5cT3gqlbkBE1SJdwy6UQoZvodiWF/ckQLZyDE/Bu1M6gVu5lVw==
+
qs@2.3.3:
version "2.3.3"
resolved "https://registry.yarnpkg.com/qs/-/qs-2.3.3.tgz#e9e85adbe75da0bbe4c8e0476a086290f863b404"
- integrity sha1-6eha2+ddoLvkyOBHaghikPhjtAQ=
+ integrity sha512-f5M0HQqZWkzU8GELTY8LyMrGkr3bPjKoFtTkwUEqJQbcljbeK8M7mliP9Ia2xoOI6oMerp+QPS7oYJtpGmWe/A==
+
+query-string@^4.1.0:
+ version "4.3.4"
+ resolved "https://registry.yarnpkg.com/query-string/-/query-string-4.3.4.tgz#bbb693b9ca915c232515b228b1a02b609043dbeb"
+ integrity sha512-O2XLNDBIg1DnTOa+2XrIwSiXEV8h2KImXUnjhhn2+UsvZ+Es2uyd5CCRTNQlDGbzUQOW3aYCBx9rVA6dzsiY7Q==
+ dependencies:
+ object-assign "^4.1.0"
+ strict-uri-encode "^1.0.0"
querystring-es3@^0.2.0:
version "0.2.1"
resolved "https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73"
- integrity sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM=
+ integrity sha512-773xhDQnZBMFobEiztv8LIl70ch5MSF/jUQVlhwFyBILqq96anmoctVIYz+ZRp0qbCKATTn6ev02M3r7Ga5vqA==
querystring@0.2.0:
version "0.2.0"
resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620"
- integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=
+ integrity sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==
randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5:
version "2.1.0"
@@ -2095,7 +2599,7 @@ raven@^2.6.4:
read-pkg-up@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-2.0.0.tgz#6b72a8048984e0c41e79510fd5e9fa99b3b549be"
- integrity sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4=
+ integrity sha512-1orxQfbWGUiTn9XsPlChs6rLie/AV9jwZTGmu2NZw/CUDJQchXJFYE0Fq5j7+n558T1JhDWLdhyd1Zj+wLY//w==
dependencies:
find-up "^2.0.0"
read-pkg "^2.0.0"
@@ -2103,7 +2607,7 @@ read-pkg-up@^2.0.0:
read-pkg@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-2.0.0.tgz#8ef1c0623c6a6db0dc6713c4bfac46332b2368f8"
- integrity sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg=
+ integrity sha512-eFIBOPW7FGjzBuk3hdXEuNSiTZS/xEMlH49HxMyzb0hyPfu4EhVjT2DH32K1hSSmVq4sebAWnZuuY5auISUTGA==
dependencies:
load-json-file "^2.0.0"
normalize-package-data "^2.3.2"
@@ -2125,7 +2629,7 @@ read-pkg@^2.0.0:
readable-stream@1.0.27-1:
version "1.0.27-1"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.0.27-1.tgz#6b67983c20357cefd07f0165001a16d710d91078"
- integrity sha1-a2eYPCA1fO/QfwFlABoW1xDZEHg=
+ integrity sha512-uQE31HGhpMrqZwtDjRliOs2aC3XBi+DdkhLs+Xa0dvVD5eDiZr3+k8rKVZcyTzxosgtMw7B/twQsK3P1KTZeVg==
dependencies:
core-util-is "~1.0.0"
inherits "~2.0.1"
@@ -2135,7 +2639,7 @@ readable-stream@1.0.27-1:
"readable-stream@>=1.0.33-1 <1.1.0-0":
version "1.0.34"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.0.34.tgz#125820e34bc842d2f2aaafafe4c2916ee32c157c"
- integrity sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=
+ integrity sha512-ok1qVCJuRkNmvebYikljxJA/UEsKwLl2nI1OmaqAu4/UE+h0wKCHok4XkL/gvi39OacXvw59RJUOFUkDib2rHg==
dependencies:
core-util-is "~1.0.0"
inherits "~2.0.1"
@@ -2154,7 +2658,23 @@ readable-stream@^3.6.0:
reduce-component@1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/reduce-component/-/reduce-component-1.0.1.tgz#e0c93542c574521bea13df0f9488ed82ab77c5da"
- integrity sha1-4Mk1QsV0UhvqE98PlIjtgqt3xdo=
+ integrity sha512-y0wyCcdQul3hI3xHfIs0vg/jSbboQc/YTOAqaxjFG7At+XSexduuOqBVL9SmOLSwa/ldkbzVzdwuk9s2EKTAZg==
+
+reduce-css-calc@^1.2.6:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/reduce-css-calc/-/reduce-css-calc-1.3.0.tgz#747c914e049614a4c9cfbba629871ad1d2927716"
+ integrity sha512-0dVfwYVOlf/LBA2ec4OwQ6p3X9mYxn/wOl2xTcLwjnPYrkgEfPx3VI4eGCH3rQLlPISG5v9I9bkZosKsNRTRKA==
+ dependencies:
+ balanced-match "^0.4.2"
+ math-expression-evaluator "^1.2.14"
+ reduce-function-call "^1.0.1"
+
+reduce-function-call@^1.0.1:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/reduce-function-call/-/reduce-function-call-1.0.3.tgz#60350f7fb252c0a67eb10fd4694d16909971300f"
+ integrity sha512-Hl/tuV2VDgWgCSEeWMLwxLZqX7OK59eU1guxXsRKTAyeYimivsKdtcV4fu3r710tpG5GmDKDhQ0HSZLExnNmyQ==
+ dependencies:
+ balanced-match "^1.0.0"
regenerator-runtime@^0.11.0:
version "0.11.1"
@@ -2164,26 +2684,26 @@ regenerator-runtime@^0.11.0:
repeating@^2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/repeating/-/repeating-2.0.1.tgz#5214c53a926d3552707527fbab415dbc08d06dda"
- integrity sha1-UhTFOpJtNVJwdSf7q0FdvAjQbdo=
+ integrity sha512-ZqtSMuVybkISo2OWvqvm7iHSWngvdaW3IpsT9/uP8v4gMi591LY6h35wdOfvQdWCKFWZWm2Y1Opp4kV7vQKT6A==
dependencies:
is-finite "^1.0.0"
require-directory@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42"
- integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I=
+ integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==
require-main-filename@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1"
- integrity sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE=
+ integrity sha512-IqSUtOVP4ksd1C/ej5zeEh/BIP2ajqpn8c5x+q99gvcIG/Qf0cud5raVnE/Dwd0ua9TXYDoDc0RE5hBSdz22Ug==
resolve@^1.10.0:
- version "1.22.0"
- resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.0.tgz#5e0b8c67c15df57a89bdbabe603a002f21731198"
- integrity sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==
+ version "1.22.1"
+ resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177"
+ integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==
dependencies:
- is-core-module "^2.8.1"
+ is-core-module "^2.9.0"
path-parse "^1.0.7"
supports-preserve-symlinks-flag "^1.0.0"
@@ -2205,7 +2725,7 @@ ripemd160@^2.0.0, ripemd160@^2.0.1:
run-queue@^1.0.0, run-queue@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/run-queue/-/run-queue-1.0.3.tgz#e848396f057d223f24386924618e25694161ec47"
- integrity sha1-6Eg5bwV9Ij8kOGkkYY4laUFh7Ec=
+ integrity sha512-ntymy489o0/QQplUDnpYAYUsO50K9SBrIVaKCWDOJzYJts0f9WH9RFJkyagebkw5+y1oi00R7ynNW/d12GBumg==
dependencies:
aproba "^1.1.1"
@@ -2219,12 +2739,12 @@ safe-buffer@~5.1.0, safe-buffer@~5.1.1:
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
-"safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.1.0:
+"safer-buffer@>= 2.1.2 < 3.0.0", safer-buffer@^2.1.0:
version "2.1.2"
resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==
-sax@^1.2.4:
+sax@^1.2.4, sax@~1.2.1:
version "1.2.4"
resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9"
integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==
@@ -2259,12 +2779,12 @@ serialize-javascript@^1.4.0:
set-blocking@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7"
- integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc=
+ integrity sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==
setimmediate@^1.0.4:
version "1.0.5"
resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285"
- integrity sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU=
+ integrity sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==
sha.js@^2.4.0, sha.js@^2.4.8:
version "2.4.11"
@@ -2277,14 +2797,14 @@ sha.js@^2.4.0, sha.js@^2.4.8:
shebang-command@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea"
- integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=
+ integrity sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==
dependencies:
shebang-regex "^1.0.0"
shebang-regex@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3"
- integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=
+ integrity sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==
signal-exit@^3.0.0:
version "3.0.7"
@@ -2299,7 +2819,14 @@ simple-statistics@7.7.3:
slash@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/slash/-/slash-1.0.0.tgz#c41f2f6c39fc16d1cd17ad4b5d896114ae470d55"
- integrity sha1-xB8vbDn8FtHNF61LXYlhFK5HDVU=
+ integrity sha512-3TYDR7xWt4dIqV2JauJr+EJeW356RXijHeUlO+8djJ+uBXPn8/2dpzBc8yQhh583sVvc9CvFAeQVgijsH+PNNg==
+
+sort-keys@^1.0.0:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-1.1.2.tgz#441b6d4d346798f1b4e49e8920adfba0e543f9ad"
+ integrity sha512-vzn8aSqKgytVik0iwdBEi+zevbTYZogewTUM6dtpmGwEcdzbub/TX4bCzRhebDCRC3QzXgJsLRKB2V/Oof7HXg==
+ dependencies:
+ is-plain-obj "^1.0.0"
source-list-map@^2.0.0:
version "2.0.1"
@@ -2316,7 +2843,7 @@ source-map-support@^0.4.15:
source-map@^0.5.3, source-map@^0.5.6, source-map@^0.5.7:
version "0.5.7"
resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc"
- integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=
+ integrity sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==
source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1:
version "0.6.1"
@@ -2352,7 +2879,12 @@ spdx-license-ids@^3.0.0:
spin.js@^2.0.1:
version "2.3.2"
resolved "https://registry.yarnpkg.com/spin.js/-/spin.js-2.3.2.tgz#6caa56d520673450fd5cfbc6971e6d0772c37a1a"
- integrity sha1-bKpW1SBnNFD9XPvGlx5tB3LDeho=
+ integrity sha512-ryhCvKCRa6J5Fxa7Y+fnhE2a+e05JwfW5dxO82zPd0uDM9o+qp8p74BJUurjiqCqmDsWNvGOAxfqdD317XIedg==
+
+sprintf-js@~1.0.2:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
+ integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==
ssri@^5.2.4:
version "5.3.0"
@@ -2364,7 +2896,7 @@ ssri@^5.2.4:
stack-trace@0.0.10:
version "0.0.10"
resolved "https://registry.yarnpkg.com/stack-trace/-/stack-trace-0.0.10.tgz#547c70b347e8d32b4e108ea1a2a159e5fdde19c0"
- integrity sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA=
+ integrity sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg==
stream-browserify@^2.0.1:
version "2.0.2"
@@ -2398,10 +2930,15 @@ stream-shift@^1.0.0:
resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.1.tgz#d7088281559ab2778424279b0877da3c392d5a3d"
integrity sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==
+strict-uri-encode@^1.0.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713"
+ integrity sha512-R3f198pcvnB+5IpnBlRkphuE9n46WyVl8I39W/ZUTZLz4nqSP/oLYUrcnJrw462Ds8he4YKMov2efsTIw1BDGQ==
+
string-width@^1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3"
- integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=
+ integrity sha512-0XsVpQLnVCXHJfyEs8tC0zpTVIr5PKKsQtkT29IwupnPTjtPmQ3xT/4yCREF9hYkV/3M3kzcUTSAZT6a6h81tw==
dependencies:
code-point-at "^1.0.0"
is-fullwidth-code-point "^1.0.0"
@@ -2425,7 +2962,7 @@ string_decoder@^1.0.0, string_decoder@^1.1.1:
string_decoder@~0.10.x:
version "0.10.31"
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94"
- integrity sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=
+ integrity sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ==
string_decoder@~1.1.1:
version "1.1.1"
@@ -2437,33 +2974,33 @@ string_decoder@~1.1.1:
strip-ansi@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-2.0.1.tgz#df62c1aa94ed2f114e1d0f21fd1d50482b79a60e"
- integrity sha1-32LBqpTtLxFOHQ8h/R1QSCt5pg4=
+ integrity sha512-2h8q2CP3EeOhDJ+jd932PRMpa3/pOJFGoF22J1U/DNbEK2gSW2DqeF46VjCXsSQXhC+k/l8/gaaRBQKL6hUPfQ==
dependencies:
ansi-regex "^1.0.0"
strip-ansi@^3.0.0, strip-ansi@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf"
- integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=
+ integrity sha512-VhumSSbBqDTP8p2ZLKj40UjBCV4+v8bUSEpUb4KjRgWk9pbqGF4REFj6KEagidb2f/M6AzC0EmFyDNGaw9OCzg==
dependencies:
ansi-regex "^2.0.0"
strip-ansi@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f"
- integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8=
+ integrity sha512-4XaJ2zQdCzROZDivEVIDPkcQn8LMFSa8kj8Gxb/Lnwzv9A8VctNZ+lfivC/sV3ivW8ElJTERXZoPBRrZKkNKow==
dependencies:
ansi-regex "^3.0.0"
strip-bom@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3"
- integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=
+ integrity sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==
strip-eof@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf"
- integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=
+ integrity sha512-7FCwGGmx8mD5xQd3RPUvnSpUXHM3BWuzjtpD4TXsfcZ9EL4azvVVUscFYwD9nx8Kh+uCBC00XBtAykoMHwTh8Q==
style-loader@^0.23.1, style-loader@~0.13.1:
version "0.23.1"
@@ -2476,7 +3013,7 @@ style-loader@^0.23.1, style-loader@~0.13.1:
superagent@^1.8.3:
version "1.8.5"
resolved "https://registry.yarnpkg.com/superagent/-/superagent-1.8.5.tgz#1c0ddc3af30e80eb84ebc05cb2122da8fe940b55"
- integrity sha1-HA3cOvMOgOuE68BcshItqP6UC1U=
+ integrity sha512-4h4R6fISQXvgjIqZ8DjONYy3y2XPxgZO0LgHsBI6tDAEhzJLpWuK+thM60SmUiERJOEJzmxlIGx/GP6+azky/A==
dependencies:
component-emitter "~1.2.0"
cookiejar "2.0.6"
@@ -2493,12 +3030,19 @@ superagent@^1.8.3:
supports-color@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7"
- integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=
+ integrity sha512-KKNVtd6pCYgPIKU4cp2733HWYCpplQhddZLBUryaAHou723x+FRzQ5Df824Fj+IyyuiQTRoub4SnIFfIcrp70g==
+
+supports-color@^3.2.3:
+ version "3.2.3"
+ resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6"
+ integrity sha512-Jds2VIYDrlp5ui7t8abHN2bjAu4LV/q4N2KivFPpGH0lrka0BMq/33AmECUXlKPcHigkNaqfXRENFju+rlcy+A==
+ dependencies:
+ has-flag "^1.0.0"
supports-color@^4.2.1:
version "4.5.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-4.5.0.tgz#be7a0de484dec5c5cddf8b3d59125044912f635b"
- integrity sha1-vnoN5ITexcXN34s9WRJQRJEvY1s=
+ integrity sha512-ycQR/UbvI9xIlEdQT1TQqwoXtEldExbCEAJgRo5YXlmSKjv6ThHnP9/vwGa1gr19Gfw+LkFd7KqYMhzrRC5JYw==
dependencies:
has-flag "^2.0.0"
@@ -2514,6 +3058,19 @@ supports-preserve-symlinks-flag@^1.0.0:
resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09"
integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==
+svgo@^0.7.0:
+ version "0.7.2"
+ resolved "https://registry.yarnpkg.com/svgo/-/svgo-0.7.2.tgz#9f5772413952135c6fefbf40afe6a4faa88b4bb5"
+ integrity sha512-jT/g9FFMoe9lu2IT6HtAxTA7RR2XOrmcrmCtGnyB/+GQnV6ZjNn+KOHZbZ35yL81+1F/aB6OeEsJztzBQ2EEwA==
+ dependencies:
+ coa "~1.0.1"
+ colors "~1.1.2"
+ csso "~2.3.1"
+ js-yaml "~3.7.0"
+ mkdirp "~0.5.1"
+ sax "~1.2.1"
+ whet.extend "~0.9.9"
+
tapable@^0.2.7:
version "0.2.9"
resolved "https://registry.yarnpkg.com/tapable/-/tapable-0.2.9.tgz#af2d8bbc9b04f74ee17af2b4d9048f807acd18a8"
@@ -2522,7 +3079,7 @@ tapable@^0.2.7:
through2@0.6.3:
version "0.6.3"
resolved "https://registry.yarnpkg.com/through2/-/through2-0.6.3.tgz#795292fde9f254c2a368b38f9cc5d1bd4663afb6"
- integrity sha1-eVKS/enyVMKjaLOPnMXRvUZjr7Y=
+ integrity sha512-6UXIsO0fTTYMgxeQ9pisMOIqF/uL6Ebva+4HxihtLLR2gscWEu+OTMwar/0TYZaeDSNS1msIJAXJRis+GojL8g==
dependencies:
readable-stream ">=1.0.33-1 <1.1.0-0"
xtend ">=4.0.0 <4.1.0-0"
@@ -2538,7 +3095,7 @@ through2@^2.0.0:
timed-out@4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/timed-out/-/timed-out-4.0.1.tgz#f32eacac5a175bea25d7fab565ab3ed8741ef56f"
- integrity sha1-8y6srFoXW+ol1/q1Zas+2HQe9W8=
+ integrity sha512-G7r3AhovYtr5YKOWQkta8RKAPb+J9IsO4uVmzjl8AZwfhs8UcUwTiD6gcJYSgOtzyjvQKrKYn41syHbUWMkafA==
timers-browserify@^2.0.4:
version "2.0.12"
@@ -2550,27 +3107,27 @@ timers-browserify@^2.0.4:
to-arraybuffer@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43"
- integrity sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M=
+ integrity sha512-okFlQcoGTi4LQBG/PgSYblw9VOyptsz2KJZqc6qtgGdes8VktzUQkj4BI2blit072iS8VODNcMA+tvnS9dnuMA==
to-fast-properties@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47"
- integrity sha1-uDVx+k2MJbguIxsG46MFXeTKGkc=
+ integrity sha512-lxrWP8ejsq+7E3nNjwYmUBMAgjMTZoTI+sdBOpvNyijeDLa29LUn9QaoXAHv4+Z578hbmHHJKZknzxVtvo77og==
trim-right@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003"
- integrity sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM=
+ integrity sha512-WZGXGstmCWgeevgTL54hrCuw1dyMQIzWy7ZfqRJfSmJZBwklI15egmQytFP6bPidmw3M8d5yEowl1niq4vmqZw==
tslib@^2.3.0:
- version "2.3.1"
- resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.3.1.tgz#e8a335add5ceae51aa261d32a490158ef042ef01"
- integrity sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==
+ version "2.4.0"
+ resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3"
+ integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==
tty-browserify@0.0.0:
version "0.0.0"
resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6"
- integrity sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY=
+ integrity sha512-JVa5ijo+j/sOoHGjw0sxw734b1LhBkQ3bvUGNdxnVXDCX81Yx7TFgnZygxrIIWn23hbfTaMYLwRmAxFyDuFmIw==
type@^1.0.1:
version "1.2.0"
@@ -2585,7 +3142,7 @@ type@^2.5.0:
typedarray@^0.0.6:
version "0.0.6"
resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777"
- integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=
+ integrity sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==
uglify-es@^3.3.4:
version "3.3.9"
@@ -2609,6 +3166,16 @@ uglifyjs-webpack-plugin@1.3.0, uglifyjs-webpack-plugin@^0.4.6:
webpack-sources "^1.1.0"
worker-farm "^1.5.2"
+uniq@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/uniq/-/uniq-1.0.1.tgz#b31c5ae8254844a3a8281541ce2b04b865a734ff"
+ integrity sha512-Gw+zz50YNKPDKXs+9d+aKAjVwpjNwqzvNpLigIruT4HA9lMZNdMqs9x07kKHB/L9WRzqp4+DlTU5s4wG2esdoA==
+
+uniqs@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/uniqs/-/uniqs-2.0.0.tgz#ffede4b36b25290696e6e165d4a59edb998e6b02"
+ integrity sha512-mZdDpf3vBV5Efh29kMw5tXoup/buMgxLzOt/XKFKcVmi+15ManNQWr6HfZ2aiZTYlYixbdNJ0KFmIZIv52tHSQ==
+
unique-filename@^1.1.0:
version "1.1.1"
resolved "https://registry.yarnpkg.com/unique-filename/-/unique-filename-1.1.1.tgz#1d69769369ada0583103a1e6ae87681b56573230"
@@ -2631,14 +3198,14 @@ uri-js@^4.2.2:
punycode "^2.1.0"
urijs@^1.19.7:
- version "1.19.7"
- resolved "https://registry.yarnpkg.com/urijs/-/urijs-1.19.7.tgz#4f594e59113928fea63c00ce688fb395b1168ab9"
- integrity sha512-Id+IKjdU0Hx+7Zx717jwLPsPeUqz7rAtuVBRLLs+qn+J2nf9NGITWVCxcijgYxBqe83C7sqsQPs6H1pyz3x9gA==
+ version "1.19.11"
+ resolved "https://registry.yarnpkg.com/urijs/-/urijs-1.19.11.tgz#204b0d6b605ae80bea54bea39280cdb7c9f923cc"
+ integrity sha512-HXgFDgDommxn5/bIv0cnQZsPhHDA90NPHD6+c/v21U5+Sx5hoP8+dP9IZXBU1gIfvdRfhG8cel9QNPeionfcCQ==
url@^0.11.0:
version "0.11.0"
resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1"
- integrity sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE=
+ integrity sha512-kbailJa29QrtXnxgq+DdCEGlbTeYM2eJUxsz6vjZavrCYPMIFHMKQmSKYAIuUK2i7hgPm28a8piX5NTUtM/LKQ==
dependencies:
punycode "1.3.2"
querystring "0.2.0"
@@ -2646,12 +3213,12 @@ url@^0.11.0:
util-deprecate@^1.0.1, util-deprecate@~1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
- integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=
+ integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==
util@0.10.3:
version "0.10.3"
resolved "https://registry.yarnpkg.com/util/-/util-0.10.3.tgz#7afb1afe50805246489e3db7fe0ed379336ac0f9"
- integrity sha1-evsa/lCAUkZInj23/g7TeTNqwPk=
+ integrity sha512-5KiHfsmkqacuKjkRkdV7SsfDJ2EGiPsK92s2MhNSY0craxjTdKTtqKsJaCWp4LW33ZZ0OPUv1WO/TFvNQRiQxQ==
dependencies:
inherits "2.0.1"
@@ -2675,15 +3242,20 @@ validate-npm-package-license@^3.0.1:
spdx-correct "^3.0.0"
spdx-expression-parse "^3.0.0"
+vendors@^1.0.0:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/vendors/-/vendors-1.0.4.tgz#e2b800a53e7a29b93506c3cf41100d16c4c4ad8e"
+ integrity sha512-/juG65kTL4Cy2su4P8HjtkTxk6VmJDiOPBufWniqQ6wknac6jNiXS9vU+hO3wgusiyqWlzTbVHi0dyJqRONg3w==
+
vm-browserify@^1.0.1:
version "1.1.2"
resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-1.1.2.tgz#78641c488b8e6ca91a75f511e7a3b32a86e5dda0"
integrity sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ==
watchpack@^1.4.0, watchpack@^2.3.1:
- version "2.3.1"
- resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.3.1.tgz#4200d9447b401156eeca7767ee610f8809bc9d25"
- integrity sha512-x0t0JuydIo8qCNctdDrn1OzH/qDzk2+rdCOC3YzumZ42fiMqmQ7T3xQurykYMhYfHaPHTp4ZxAx2NfUo1K6QaA==
+ version "2.4.0"
+ resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d"
+ integrity sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg==
dependencies:
glob-to-regexp "^0.4.1"
graceful-fs "^4.1.2"
@@ -2691,7 +3263,7 @@ watchpack@^1.4.0, watchpack@^2.3.1:
webpack-bundle-tracker@0.0.9:
version "0.0.9"
resolved "https://registry.yarnpkg.com/webpack-bundle-tracker/-/webpack-bundle-tracker-0.0.9.tgz#fd6ba1e65478f5cd3414e6f2ef3d4e1be0e3cd9d"
- integrity sha1-/Wuh5lR49c00FOby7z1OG+DjzZ0=
+ integrity sha512-Dnpn1zUrLjjKKsKbW4/F+FbR/tm254eEaiszVPAvF9Rnk+qVvmNMz5yH1QAW+cAHOg/bEfJwI8WghyOnh2e7zw==
dependencies:
mkdirp "^0.5.1"
strip-ansi "^2.0.1"
@@ -2732,10 +3304,15 @@ webpack@3.12.0:
webpack-sources "^1.0.1"
yargs "^8.0.2"
+whet.extend@~0.9.9:
+ version "0.9.9"
+ resolved "https://registry.yarnpkg.com/whet.extend/-/whet.extend-0.9.9.tgz#f877d5bf648c97e5aa542fadc16d6a259b9c11a1"
+ integrity sha512-mmIPAft2vTgEILgPeZFqE/wWh24SEsR/k+N9fJ3Jxrz44iDFy9aemCxdksfURSHYFCLmvs/d/7Iso5XjPpNfrA==
+
which-module@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a"
- integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=
+ integrity sha512-B+enWhmw6cjfVC7kS8Pj9pCrKSc5txArRyaYGe088shv/FGWH+0Rjx/xPgtsWfsUtS27FkP697E4DDhgrgoc0Q==
which@^1.2.9:
version "1.3.1"
@@ -2754,7 +3331,7 @@ worker-farm@^1.5.2:
wrap-ansi@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85"
- integrity sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU=
+ integrity sha512-vAaEaDM946gbNpH5pLVNR+vX2ht6n0Bt3GXwVB1AuAqZosOvHNF3P7wDnh8KLkSqgUh0uh77le7Owgoz+Z9XBw==
dependencies:
string-width "^1.0.1"
strip-ansi "^3.0.1"
@@ -2762,7 +3339,7 @@ wrap-ansi@^2.0.0:
wrappy@1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
- integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
+ integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==
"xtend@>=4.0.0 <4.1.0-0", xtend@^4.0.0, xtend@~4.0.1:
version "4.0.2"
@@ -2782,19 +3359,19 @@ y18n@^4.0.0:
yallist@^2.1.2:
version "2.1.2"
resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52"
- integrity sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=
+ integrity sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A==
yargs-parser@^7.0.0:
version "7.0.0"
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-7.0.0.tgz#8d0ac42f16ea55debd332caf4c4038b3e3f5dfd9"
- integrity sha1-jQrELxbqVd69MyyvTEA4s+P139k=
+ integrity sha512-WhzC+xgstid9MbVUktco/bf+KJG+Uu6vMX0LN1sLJvwmbCQVxb4D8LzogobonKycNasCZLdOzTAk1SK7+K7swg==
dependencies:
camelcase "^4.1.0"
yargs@^8.0.2:
version "8.0.2"
resolved "https://registry.yarnpkg.com/yargs/-/yargs-8.0.2.tgz#6299a9055b1cefc969ff7e79c1d918dceb22c360"
- integrity sha1-YpmpBVsc78lp/355wdkY3Osiw2A=
+ integrity sha512-3RiZrpLpjrzIAKgGdPktBcMP/eG5bDFlkI+PHle1qwzyVXyDQL+pD/eZaMoOOO0Y7LLBfjpucObuUm/icvbpKQ==
dependencies:
camelcase "^4.1.0"
cliui "^3.2.0"
diff --git a/admin_tests/regisration_schemas/__init__.py b/admin_tests/regisration_schemas/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/admin_tests/regisration_schemas/test_views.py b/admin_tests/regisration_schemas/test_views.py
new file mode 100644
index 00000000000..a6135e90501
--- /dev/null
+++ b/admin_tests/regisration_schemas/test_views.py
@@ -0,0 +1,220 @@
+import pytest
+
+from django.test import RequestFactory
+
+from osf.models import RegistrationSchema
+from admin_tests.utilities import setup_view
+from admin.registration_schemas import views
+from django.contrib.messages.storage.fallback import FallbackStorage
+
+from django.core.files.uploadedfile import SimpleUploadedFile
+from osf_tests.factories import RegistrationProviderFactory, RegistrationFactory
+
+
+@pytest.mark.django_db
+class TestRegistrationSchemaList:
+
+ @pytest.fixture()
+ def req(self):
+ req = RequestFactory().get('/fake_path')
+ # django.contrib.messages has a bug which affects unit tests
+ # more info here -> https://code.djangoproject.com/ticket/17971
+ setattr(req, 'session', 'session')
+ messages = FallbackStorage(req)
+ setattr(req, '_messages', messages)
+ return req
+
+ @pytest.fixture()
+ def registration_schema(self):
+ return RegistrationSchema.objects.create(
+ name='foo',
+ schema={'foo': 42, 'atomicSchema': True},
+ schema_version=1,
+ active=False,
+ visible=False
+ )
+
+ @pytest.fixture()
+ def view(self, req, registration_schema):
+ view = views.RegistrationSchemaListView()
+ view.kwargs = {'registration_schema_id': registration_schema.id}
+ return setup_view(view, req)
+
+ def test_registration_schema_list(self, view, registration_schema, req):
+ data = view.get_context_data()
+ assert any(item.id == registration_schema.id for item in data['registration_schemas'])
+
+
+@pytest.mark.django_db
+@pytest.mark.urls('admin.base.urls')
+class TestRegistrationSchemaDetail:
+
+ @pytest.fixture()
+ def req(self):
+ req = RequestFactory().get('/fake_path')
+ # django.contrib.messages has a bug which affects unit tests
+ # more info here -> https://code.djangoproject.com/ticket/17971
+ setattr(req, 'session', 'session')
+ messages = FallbackStorage(req)
+ setattr(req, '_messages', messages)
+ return req
+
+ @pytest.fixture()
+ def registration_schema(self):
+ return RegistrationSchema.objects.create(
+ name='foo',
+ schema={'foo': 42, 'atomicSchema': True},
+ schema_version=1,
+ active=False,
+ visible=False
+ )
+
+ @pytest.fixture()
+ def view(self, req, registration_schema):
+ plain_view = views.RegistrationSchemaDetailView()
+ view = setup_view(plain_view, req)
+ view.kwargs = {'registration_schema_id': registration_schema.id}
+ return view
+
+ def test_registration_schema_detail(self, view, registration_schema):
+ registration_schema.visible = True
+ registration_schema.active = True
+ registration_schema.save()
+
+ context = view.get_context_data()
+ assert context['registration_schema'] == registration_schema
+ assert context['form'].data['active'] == registration_schema.active
+ assert context['form'].data['visible'] == registration_schema.visible
+
+ def test_registration_schema_update(self, view, registration_schema):
+ assert not registration_schema.visible
+ assert not registration_schema.active
+ form = view.get_form()
+
+ # `['on']` indicates a selected toggle in this form
+ form.data['active'] = ['on']
+ form.data['visible'] = ['on']
+
+ view.form_valid(form)
+
+ registration_schema.refresh_from_db()
+ assert registration_schema.visible
+ assert registration_schema.active
+
+
+@pytest.mark.django_db
+@pytest.mark.urls('admin.base.urls')
+class TestCreateRegistrationSchema:
+
+ @pytest.fixture()
+ def req(self):
+ req = RequestFactory().get('/fake_path')
+ # django.contrib.messages has a bug which affects unit tests
+ # more info here -> https://code.djangoproject.com/ticket/17971
+ setattr(req, 'session', 'session')
+ messages = FallbackStorage(req)
+ setattr(req, '_messages', messages)
+ return req
+
+ @pytest.fixture
+ def csv_data(self):
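+ # Minimal schema CSV: a header row plus a single `page-heading` block with display and help text.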
+ return b'block_type,display_text,help_text,example_text,required,registration_response_key,NOEX_updates,' \
+ b'NOEX_update_reason\npage-heading,This is the page heading,"This is extra, helpful context",,FALSE,,' \
+ b'FALSE,'
+
+ @pytest.fixture
+ def csv_file(self, csv_data):
+ return SimpleUploadedFile('test_file.csv', csv_data, content_type='application/csv')
+
+ @pytest.fixture()
+ def view(self, req):
+ plain_view = views.RegistrationSchemaCreateView()
+ view = setup_view(plain_view, req)
+ return view
+
+ @pytest.fixture()
+ def form(self, view, csv_file):
+ form = view.get_form()
+ form.data['name'] = 'Trust the Process'
+ form.files['schema'] = csv_file
+ return form
+
+ def test_registration_schema_create(self, view, csv_file, form, req):
+ view.form_valid(form)
+ registration_schema = RegistrationSchema.objects.get(name=form.data['name'])
+ assert registration_schema.schema_blocks.count() == 1
+ block = registration_schema.schema_blocks.first()
+ assert block.block_type == 'page-heading'
+ assert block.display_text == 'This is the page heading'
+ assert registration_schema.schema_version == 1
+
+ def test_registration_schema_increment_version(self, view, csv_file, form, req):
+ view.form_valid(form)
+ registration_schema = RegistrationSchema.objects.get_latest_version(name=form.data['name'])
+ assert registration_schema.schema_version == 1
+
+ view.form_valid(form)
+ registration_schema = RegistrationSchema.objects.get_latest_version(name=form.data['name'])
+ assert registration_schema.schema_version == 2
+
+ def test_registration_schema_csv_to_blocks(self, view, csv_file):
+ blocks = view.csv_to_blocks(csv_file)
+ assert len(blocks) == 1
+ assert blocks[0]['block_type'] == 'page-heading'
+ assert blocks[0]['display_text'] == 'This is the page heading'
+
+
+@pytest.mark.django_db
+@pytest.mark.urls('admin.base.urls')
+class TestDeleteRegistrationSchema:
+
+ @pytest.fixture()
+ def req(self):
+ req = RequestFactory().get('/fake_path')
+ # django.contrib.messages has a bug which affects unit tests
+ # more info here -> https://code.djangoproject.com/ticket/17971
+ setattr(req, 'session', 'session')
+ messages = FallbackStorage(req)
+ setattr(req, '_messages', messages)
+ return req
+
+ @pytest.fixture()
+ def registration_schema(self):
+ return RegistrationSchema.objects.create(
+ name='foo',
+ schema={'foo': 42, 'atomicSchema': True},
+ schema_version=1,
+ active=False,
+ visible=False
+ )
+
+ @pytest.fixture()
+ def registration(self, registration_schema):
+ registration = RegistrationFactory()
+ registration.registered_schema.add(registration_schema)
+ registration.save()
+ return registration
+
+ @pytest.fixture()
+ def provider(self, registration_schema):
+ provider = RegistrationProviderFactory()
+ registration_schema.providers.add(provider)
+ return provider
+
+ @pytest.fixture()
+ def view(self, req, registration_schema):
+ view = views.RegistrationSchemaDeleteView()
+ view = setup_view(view, req)
+ view.kwargs = {'registration_schema_id': registration_schema.id}
+ return view
+
+ def test_registration_schema_delete(self, req, view, registration_schema):
+ view.delete(req)
+ assert not RegistrationSchema.objects.filter(id=registration_schema.id)
+
+ def test_registration_schema_prevent_delete_if_used(self, req, view, registration_schema, provider, registration):
+ """
+ If a RegistrationSchema is being used as part of a registration, it shouldn't be deletable from the admin app.
+ """
+ view.delete(req)
+ assert RegistrationSchema.objects.filter(id=registration_schema.id)
diff --git a/api/base/serializers.py b/api/base/serializers.py
index 6757891d5ad..5d3f7e996f7 100644
--- a/api/base/serializers.py
+++ b/api/base/serializers.py
@@ -919,7 +919,13 @@ def to_representation(self, value):
try:
related_type = resolved_url.namespace.split(':')[-1]
# TODO: change kwargs to preprint_provider_id and registration_id
- if related_type in ('preprint_providers', 'preprint-providers', 'registration-providers'):
+ if related_class.view_name == 'node-settings':
+ related_id = resolved_url.kwargs['node_id']
+ related_type = 'node-setting'
+ elif related_class.view_name == 'node-storage':
+ related_id = resolved_url.kwargs['node_id']
+ related_type = 'node-storage'
+ elif related_type in ('preprint_providers', 'preprint-providers', 'registration-providers'):
related_id = resolved_url.kwargs['provider_id']
elif related_type in ('registrations', 'draft_nodes'):
related_id = resolved_url.kwargs['node_id']
diff --git a/api/base/throttling.py b/api/base/throttling.py
index 86aa01b1377..765be5f991f 100644
--- a/api/base/throttling.py
+++ b/api/base/throttling.py
@@ -121,11 +121,13 @@ def allow_request(self, request, view):
return super(SendEmailDeactivationThrottle, self).allow_request(request, view)
-class BurstRateThrottle(UserRateThrottle):
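+# Mix in NonCookieAuthThrottle so burst throttling handles cookie-authenticated requests the same
+# way as the files throttles below.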
+class BurstRateThrottle(NonCookieAuthThrottle, UserRateThrottle):
scope = 'burst'
+
class FilesRateThrottle(NonCookieAuthThrottle, UserRateThrottle):
scope = 'files'
+
class FilesBurstRateThrottle(NonCookieAuthThrottle, UserRateThrottle):
scope = 'files-burst'
diff --git a/api/base/views.py b/api/base/views.py
index 12d000452a6..eddb7e2a4a6 100644
--- a/api/base/views.py
+++ b/api/base/views.py
@@ -638,11 +638,23 @@ def bulk_get_file_nodes_from_wb_resp(self, files_list):
)
# mirrors BaseFileNode get_or_create
+ _path = '/' + attrs['path'].lstrip('/')
+ query = {
+ 'target_object_id': node.id,
+ 'target_content_type': content_type,
+ '_path': _path,
+ }
+
+ # Dataverse provides two sets of files with the same path, so we disambiguate them by dataset
+ # version (Draft/Published), preserving legacy behavior.
+ if attrs['provider'] == 'dataverse':
+ query.update({'_history__0__extra__datasetVersion': attrs['extra']['datasetVersion']})
+
try:
- file_obj = base_class.objects.get(target_object_id=node.id, target_content_type=content_type, _path='/' + attrs['path'].lstrip('/'))
+ file_obj = base_class.objects.get(**query)
except base_class.DoesNotExist:
# create method on BaseFileNode appends provider, bulk_create bypasses this step so it is added here
- file_obj = base_class(target=node, _path='/' + attrs['path'].lstrip('/'), provider=base_class._provider)
+ file_obj = base_class(target=node, _path=_path, provider=base_class._provider)
objs_to_create[base_class].append(file_obj)
else:
file_objs.append(file_obj)
@@ -655,7 +667,8 @@ def bulk_get_file_nodes_from_wb_resp(self, files_list):
base_class.objects.bulk_create(objs_to_create[base_class])
file_objs += objs_to_create[base_class]
- return file_objs
+ # return the collected files as a QuerySet rather than a plain list
+ return BaseFileNode.objects.filter(id__in=[item.id for item in file_objs])
def get_file_node_from_wb_resp(self, item):
"""Takes file data from wb response, touches/updates metadata for it, and returns file object"""
diff --git a/api/files/annotations.py b/api/files/annotations.py
new file mode 100644
index 00000000000..53b38bcf069
--- /dev/null
+++ b/api/files/annotations.py
@@ -0,0 +1,86 @@
+from django.db.models import BooleanField, Case, Exists, F, IntegerField, Max, OuterRef, Q, Subquery, Value, When
+from django.db.models.functions.base import Cast
+from django.contrib.postgres.fields.jsonb import KeyTextTransform
+from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField
+from osf.utils.fields import NonNaiveDateTimeField
+from osf.models import FileVersion
+
+
+# Get date modified for OSF and non-OSF providers
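+# For osfstorage, the newest FileVersion's `created` is used; for other providers, the nested casts
+# below dig `_history[-1]['modified']['value']` out of the JSON history.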
+DATE_MODIFIED = Case(
+ When(
+ provider='osfstorage',
+ then=Cast(Max('versions__created'), NonNaiveDateTimeField()),
+ ),
+ default=Cast(
+ KeyTextTransform(
+ 'value',
+ Cast(
+ KeyTextTransform(
+ 'modified',
+ Cast(
+ KeyTextTransform(
+ -1,
+ '_history',
+ ), output_field=DateTimeAwareJSONField(),
+ ),
+ ), output_field=DateTimeAwareJSONField(),
+ ),
+ ), output_field=NonNaiveDateTimeField(),
+ ),
+)
+
+def make_show_as_unviewed_annotations(user):
+ '''Returns the annotations required to set the show_as_unviewed attribute.
+
+ Usage:
+ OsfStorageFile.objects.annotate(**make_show_as_unviewed_annotations(request.user))
+
+ show_as_unviewed is only true if the user has not seen the latest version of a file
+ but has looked at it previously. Making this determination requires multiple annotations,
+ which is why this returns a dictionary that must be unpacked into kwargs.
+ '''
+ if user.is_anonymous:
+ return {'show_as_unviewed': Value(False, output_field=BooleanField())}
+
+ seen_versions = FileVersion.objects.annotate(
+ latest_version=Subquery(
+ FileVersion.objects.filter(
+ basefilenode=OuterRef('basefilenode'),
+ ).order_by('-created').values('id')[:1],
+ output_field=IntegerField(),
+ ),
+ ).filter(seen_by=user)
+
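+ # The Exists() subqueries flag, per file, whether the user has seen the newest version and
+ # whether they have seen any earlier version.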
+ has_seen_latest = Exists(
+ seen_versions.filter(basefilenode=OuterRef('id')).filter(id=F('latest_version')),
+ )
+ has_previously_seen = Exists(
+ seen_versions.filter(basefilenode=OuterRef('id')).exclude(id=F('latest_version')),
+ )
+ show_as_unviewed = Case(
+ When(Q(has_seen_latest=False) & Q(has_previously_seen=True), then=Value(True)),
+ default=Value(False),
+ output_field=BooleanField(),
+ )
+
+ return {
+ 'has_seen_latest': has_seen_latest,
+ 'has_previously_seen': has_previously_seen,
+ 'show_as_unviewed': show_as_unviewed,
+ }
+
+def check_show_as_unviewed(user, osf_file):
+ '''A separate function for assigning the show_as_unviewed value to a single instance.
+
+ Our logic is not conducive to assigning annotations to a single file, so do it manually
+ in the DetailView case.
+ '''
+ if user.is_anonymous:
+ return False
+
+ latest_version = osf_file.versions.order_by('-created').first()
+ return (
+ osf_file.versions.filter(seen_by=user).exists()
+ and not latest_version.seen_by.filter(id=user.id).exists()
+ )
diff --git a/api/files/serializers.py b/api/files/serializers.py
index 2ebad8f90c0..cd5dad0f83f 100644
--- a/api/files/serializers.py
+++ b/api/files/serializers.py
@@ -213,7 +213,12 @@ class BaseFileSerializer(JSONAPISerializer):
read_only=True, help_text='The Unix-style path of this object relative to the provider root',
)
last_touched = VersionedDateTimeField(read_only=True, help_text='The last time this file had information fetched about it via the OSF')
- date_modified = ser.SerializerMethodField(read_only=True, help_text='Timestamp when the file was last modified')
+ date_modified = VersionedDateTimeField(
+ read_only=True,
+ help_text='Timestamp when the file was last modified',
+ required=False,
+ allow_null=True,
+ )
date_created = ser.SerializerMethodField(read_only=True, help_text='Timestamp when the file was created')
extra = ser.SerializerMethodField(read_only=True, help_text='Additional metadata about this file')
tags = JSONAPIListField(child=FileTagField(), required=False)
@@ -261,9 +266,12 @@ class BaseFileSerializer(JSONAPISerializer):
def absolute_url(self, obj):
if obj.is_file:
- return furl.furl(settings.DOMAIN).set(
+ url = furl.furl(settings.DOMAIN).set(
path=(obj.target._id, 'files', obj.provider, obj.path.lstrip('/')),
- ).url
+ )
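+ # Dataverse serves Draft and Published files at the same path, so pin the dataset version in
+ # the URL's query string.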
+ if obj.provider == 'dataverse':
+ url.add(query_params={'version': obj.history[-1]['extra']['datasetVersion']})
+ return url.url
def get_download_link(self, obj):
if obj.is_file:
@@ -284,22 +292,6 @@ def get_size(self, obj):
return self.size
return None
- def get_date_modified(self, obj):
- mod_dt = None
- if obj.provider == 'osfstorage' and obj.versions.exists():
- # Each time an osfstorage file is added or uploaded, a new version object is created with its
- # date_created equal to the time of the update. The external_modified is the modified date
- # from the backend the file is stored on. This field refers to the modified date on osfstorage,
- # so prefer to use the created of the latest version.
- mod_dt = obj.versions.first().created
- elif obj.provider != 'osfstorage' and obj.history:
- mod_dt = obj.history[-1].get('modified', None)
-
- if self.context['request'].version >= '2.2' and obj.is_file and mod_dt:
- return datetime.strftime(mod_dt, '%Y-%m-%dT%H:%M:%S.%fZ')
-
- return mod_dt and mod_dt.replace(tzinfo=pytz.utc)
-
def get_date_created(self, obj):
creat_dt = None
if obj.provider == 'osfstorage' and obj.versions.exists():
@@ -328,6 +320,10 @@ def get_extra(self, obj):
}
if obj.provider == 'osfstorage' and obj.is_file:
extras['downloads'] = obj.get_download_count()
+
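+ # Surface Dataverse-specific metadata (e.g. datasetVersion) from the latest history entry.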
+ if obj.provider == 'dataverse':
+ extras.update(obj.history[-1]['extra'])
+
return extras
def get_current_user_can_comment(self, obj):
@@ -395,6 +391,14 @@ class FileSerializer(BaseFileSerializer):
)
target = TargetField(link_type='related', meta={'type': 'get_target_type'})
+ # Assigned via annotation. See api/files/annotations for info
+ show_as_unviewed = ser.BooleanField(
+ read_only=True,
+ required=False,
+ default=False,
+ help_text='Whether to mark the file as unviewed for the current user',
+ )
+
def get_target_type(self, obj):
if isinstance(obj, Preprint):
return 'preprints'
@@ -421,9 +425,6 @@ class OsfStorageFileSerializer(FileSerializer):
'tags',
])
- def create(self, validated_data):
- return super(OsfStorageFileSerializer, self).create(validated_data)
-
class FileDetailSerializer(FileSerializer):
"""
diff --git a/api/files/views.py b/api/files/views.py
index dad90135a73..200f823f217 100644
--- a/api/files/views.py
+++ b/api/files/views.py
@@ -1,5 +1,6 @@
import io
+from django.db.models import Max
from django.http import FileResponse
from rest_framework import generics
@@ -22,6 +23,7 @@
from api.base import permissions as base_permissions
from api.nodes.permissions import ContributorOrPublic
from api.nodes.permissions import ReadOnlyIfRegistration
+from api.files import annotations
from api.files.permissions import IsPreprintFile
from api.files.permissions import CheckedOutOrAdmin
from api.files.permissions import FileMetadataRecordPermission
@@ -89,6 +91,18 @@ def get_object(self):
if self.request.GET.get('create_guid', False):
if self.get_target().has_permission(user, ADMIN) and utils.has_admin_scope(self.request):
file.get_guid(create=True)
+
+ # We normally would pass this through `get_file` as an annotation, but the `select_for_update` feature prevents
+ # grouping versions in an annotation
+ if file.kind == 'file':
+ file.show_as_unviewed = annotations.check_show_as_unviewed(
+ user=self.request.user, osf_file=file,
+ )
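+ # Match the list view's DATE_MODIFIED annotation: latest version `created` for osfstorage,
+ # newest history entry for addon providers.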
+ if file.provider == 'osfstorage':
+ file.date_modified = file.versions.aggregate(Max('created'))['created__max']
+ else:
+ file.date_modified = file.history[-1]['modified']
+
return file
diff --git a/api/institutions/authentication.py b/api/institutions/authentication.py
index 5a4b7701772..fc9b828ddb9 100644
--- a/api/institutions/authentication.py
+++ b/api/institutions/authentication.py
@@ -17,46 +17,38 @@
from osf import features
from osf.models import Institution
+from osf.models.institution import SharedSsoAffiliationFilterCriteriaAction
from website.mails import send_mail, WELCOME_OSF4I
from website.settings import OSF_SUPPORT_EMAIL, DOMAIN
logger = logging.getLogger(__name__)
-# This map defines how to find the secondary institution IdP which uses the shared SSO of a primary
-# IdP. Each map entry has the following format.
+# This map defines how to find the secondary institution which uses SSO of a primary one. Each map
+# entry has the following format.
#
# '<primary institution ID>': {
-# 'criteria': 'attribute',
-# 'attribute': '<name of the attribute released by the primary IdP>',
-# 'institutions': {
-# '<attribute value 1>': '<secondary institution ID 1>',
-# '<attribute value 2>': '<secondary institution ID 2>',
-# ...
-# },
-# ...
-# }
-#
-# Currently, the only active criteria is "attribute", which the primary institution IdP releases to
-# OSF for us to identify the secondary institution. Another option is "emailDomain". For example:
-#
-# '<primary institution ID>': {
-# 'criteria': 'emailDomain',
-# 'institutions': {
-# '<email domain>': '<secondary institution ID>',
+# 'attribute_name': 'the name of the attribute released by the primary institution',
+# 'criteria_action': 'the action used to compare the attribute value with the criteria value',
+# 'criteria_value': 'the value the attribute value is compared against',
+# 'institution_id': 'the ID of the secondary institution',
# }
+# For now, this map is temporarily defined here but will be moved to settings or re-implemented
+# in the model via relationships later. In addition, we should be able to use a fixed attribute
+# name, since CAS can normalize it into "sharedSsoFilter" ahead of time.
#
INSTITUTION_SHARED_SSO_MAP = {
'brown': {
- 'criteria': 'attribute',
- 'attribute': 'isMemberOf',
- 'institutions': {
- 'thepolicylab': 'thepolicylab',
- },
+ 'attribute_name': 'isMemberOf',
+ 'criteria_action': SharedSsoAffiliationFilterCriteriaAction.EQUALS_TO.value,
+ 'criteria_value': 'thepolicylab',
+ 'institution_id': 'thepolicylab',
+ },
+ 'fsu': {
+ 'attribute_name': 'userRoles',
+ 'criteria_action': SharedSsoAffiliationFilterCriteriaAction.CONTAINS.value,
+ 'criteria_value': 'FSU_OSF_MAGLAB',
+ 'institution_id': 'nationalmaglab',
},
}
@@ -158,34 +150,42 @@ def authenticate(self, request):
secondary_institution = None
if provider['id'] in INSTITUTION_SHARED_SSO_MAP:
switch_map = INSTITUTION_SHARED_SSO_MAP[provider['id']]
- criteria_type = switch_map.get('criteria')
- if criteria_type == 'attribute':
- attribute_name = switch_map.get('attribute')
- attribute_value = provider['user'].get(attribute_name)
- if attribute_value:
- secondary_institution_id = switch_map.get(
- 'institutions',
- {},
- ).get(attribute_value)
- logger.info('Institution SSO: primary=[{}], secondary=[{}], '
- 'username=[{}]'.format(provider['id'], secondary_institution_id, username))
- secondary_institution = Institution.load(secondary_institution_id)
- if not secondary_institution:
- # Log errors and inform Sentry but do not raise an exception if OSF fails
- # to load the secondary institution from database
- message = 'Institution SSO Error: invalid secondary institution [{}]; ' \
- 'primary=[{}], username=[{}]'.format(attribute_value, provider['id'], username)
- logger.error(message)
- sentry.log_message(message)
- else:
- # SSO from primary institution only
- logger.info('Institution SSO: primary=[{}], secondary=[None], '
- 'username=[{}]'.format(provider['id'], username))
+ attribute_name = switch_map.get('attribute_name')
+ criteria_action = switch_map.get('criteria_action')
+ criteria_value = switch_map.get('criteria_value')
+ attribute_value = provider['user'].get(attribute_name)
+ # Check affiliation filter criteria and retrieve the secondary institution ID
+ secondary_institution_id = None
+ if criteria_action == SharedSsoAffiliationFilterCriteriaAction.EQUALS_TO.value:
+ secondary_institution_id = switch_map.get('institution_id') if criteria_value == attribute_value else None
+ elif criteria_action == SharedSsoAffiliationFilterCriteriaAction.CONTAINS.value:
+ secondary_institution_id = switch_map.get('institution_id') if criteria_value in attribute_value else None
else:
- message = 'Institution SSO Error: invalid criteria [{}]; ' \
- 'primary=[{}], username=[{}]'.format(criteria_type, provider['id'], username)
+ message = 'Institution Shared SSO Error: invalid affiliation filter criteria action [{}]; ' \
+ 'primary=[{}], username=[{}]'.format(criteria_action, provider['id'], username)
logger.error(message)
sentry.log_message(message)
+ # Attempt to load the secondary institution by ID
+ if secondary_institution_id:
+ logger.info(
+ 'Institution Shared SSO Eligible: primary=[{}], secondary=[{}], '
+ 'filter=[{}: {} {} {}], username=[{}]'.format(
+ provider['id'], secondary_institution_id, attribute_name,
+ attribute_value, criteria_action, criteria_value, username,
+ ),
+ )
+ secondary_institution = Institution.load(secondary_institution_id)
+ if not secondary_institution:
+ # Log errors and inform Sentry but do not raise an exception if OSF fails
+ # to load the secondary institution from database
+ message = 'Institution Shared SSO Warning: invalid secondary institution [{}], primary=[{}], ' \
+ 'username=[{}]'.format(secondary_institution_id, provider['id'], username)
+ logger.error(message)
+ sentry.log_message(message)
+ else:
+ # SSO from primary institution only
+ logger.info('Institution SSO: primary=[{}], secondary=[None], '
+ 'username=[{}]'.format(provider['id'], username))
# Use given name and family name to build full name if it is not provided
if given_name and family_name and not fullname:
diff --git a/api/nodes/serializers.py b/api/nodes/serializers.py
index b15119c06ca..63cca6b8550 100644
--- a/api/nodes/serializers.py
+++ b/api/nodes/serializers.py
@@ -14,7 +14,7 @@
ShowIfVersion, TargetTypeField, TypeField,
WaterbutlerLink, relationship_diff, BaseAPISerializer,
HideIfWikiDisabled, ShowIfAdminScopeOrAnonymous,
- ValuesListField,
+ ValuesListField, TargetField,
)
from api.base.settings import ADDONS_FOLDER_CONFIGURABLE
from api.base.utils import (
@@ -38,6 +38,7 @@
Comment, DraftRegistration, ExternalAccount, Institution,
RegistrationSchema, AbstractNode, PrivateLink, Preprint,
RegistrationProvider, OSFGroup, NodeLicense, DraftNode,
+ Registration, Node,
)
from website.project import new_private_link
from website.project.model import NodeUpdateError
@@ -556,6 +557,11 @@ def get_current_user_permissions(self, obj):
user = self.context['request'].user
request_version = self.context['request'].version
default_perm = [osf_permissions.READ] if StrictVersion(request_version) < StrictVersion('2.11') else []
+
+ # View only link users should always get `READ` permissions regardless of other permissions
+ if Auth(private_key=self.context['request'].query_params.get('view_only')).private_link:
+ return [osf_permissions.READ]
+
if user.is_anonymous:
return default_perm
@@ -1408,6 +1414,20 @@ class NodeStorageProviderSerializer(JSONAPISerializer):
help_text='The folder in which this file exists',
)
+ target = TargetField(link_type='related', meta={'type': 'get_target_type'})
+
+ def get_target_type(self, obj):
+ if isinstance(obj, Preprint):
+ return 'preprints'
+ elif isinstance(obj, DraftNode):
+ return 'draft_nodes'
+ elif isinstance(obj, Registration):
+ return 'registrations'
+ elif isinstance(obj, Node):
+ return 'nodes'
+ else:
+ raise NotImplementedError()
+
class Meta:
type_ = 'files'
diff --git a/api/nodes/views.py b/api/nodes/views.py
index 6dd08918ef9..cf83529ca66 100644
--- a/api/nodes/views.py
+++ b/api/nodes/views.py
@@ -2,7 +2,7 @@
from distutils.version import StrictVersion
from django.apps import apps
-from django.db.models import Q, OuterRef, Exists, Subquery, F
+from django.db.models import F, Max, Q, Subquery
from django.utils import timezone
from django.contrib.contenttypes.models import ContentType
from rest_framework import generics, permissions as drf_permissions
@@ -11,7 +11,6 @@
from rest_framework.status import HTTP_202_ACCEPTED, HTTP_204_NO_CONTENT
from addons.base.exceptions import InvalidAuthError
-from addons.osfstorage.models import OsfStorageFolder
from api.addons.serializers import NodeAddonFolderSerializer
from api.addons.views import AddonSettingsMixin
from api.base import generic_bulk_views as bulk_views
@@ -66,6 +65,7 @@
)
from api.draft_registrations.serializers import DraftRegistrationSerializer, DraftRegistrationDetailSerializer
from api.files.serializers import FileSerializer, OsfStorageFileSerializer
+from api.files import annotations as file_annotations
from api.identifiers.serializers import NodeIdentifierSerializer
from api.identifiers.views import IdentifierList
from api.institutions.serializers import InstitutionSerializer
@@ -134,13 +134,22 @@
from framework.auth.oauth_scopes import CoreScopes
from framework.sentry import log_exception
from osf.features import OSF_GROUPS
-from osf.models import AbstractNode
-from osf.models import (Node, PrivateLink, Institution, Comment, DraftRegistration, Registration, )
-from osf.models import OSFUser
-from osf.models import OSFGroup
-from osf.models import NodeRelation, Guid
-from osf.models import BaseFileNode
-from osf.models.files import File, Folder
+from osf.models import (
+ AbstractNode,
+ OSFUser,
+ Node,
+ PrivateLink,
+ Institution,
+ Comment,
+ DraftRegistration,
+ Registration,
+ BaseFileNode,
+ OSFGroup,
+ NodeRelation,
+ Guid,
+ File,
+ Folder,
+)
from addons.osfstorage.models import Region
from osf.utils.permissions import ADMIN, WRITE_NODE
from website import mails, settings
@@ -1111,6 +1120,9 @@ def serializer_class(self):
return OsfStorageFileSerializer
return FileSerializer
+ def get_resource(self):
+ return get_object_or_error(AbstractNode, self.kwargs['node_id'], self.request)
+
# overrides FilterMixin
def postprocess_query_param(self, key, field_name, operation):
# tag queries will usually be on Tag.name,
@@ -1142,47 +1154,47 @@ def postprocess_query_param(self, key, field_name, operation):
]
def get_default_queryset(self):
- files_list = self.fetch_from_waterbutler()
-
- if isinstance(files_list, list):
- provider = self.kwargs[self.provider_lookup_url_kwarg]
- # Resolve to a provider-specific subclass, so that
- # trashed file nodes are filtered out automatically
- ConcreteFileNode = BaseFileNode.resolve_class(provider, BaseFileNode.ANY)
- file_ids = [f.id for f in self.bulk_get_file_nodes_from_wb_resp(files_list)]
- return ConcreteFileNode.objects.filter(id__in=file_ids)
-
- if isinstance(files_list, list) or not isinstance(files_list, Folder):
- # We should not have gotten a file here
- raise NotFound
-
- sub_qs = OsfStorageFolder.objects.filter(_children=OuterRef('pk'), pk=files_list.pk)
- return files_list.children.annotate(folder=Exists(sub_qs)).filter(folder=True).prefetch_related('versions', 'tags', 'guids')
+ resource = self.get_resource()
+ path = self.kwargs[self.path_lookup_url_kwarg]
+ provider = self.kwargs[self.provider_lookup_url_kwarg]
+ folder_object = self.get_file_object(resource, path, provider)
+
+ # Addon-provided files/folders don't have versions, so for their date_modified we check the history.
+ # The history is updated every time we query the file metadata via WaterButler.
+ if provider == 'osfstorage':
+ return folder_object.children.prefetch_related(
+ 'versions',
+ 'tags',
+ 'guids',
+ )
+ else:
+ return self.bulk_get_file_nodes_from_wb_resp(folder_object)
# overrides ListAPIView
def get_queryset(self):
path = self.kwargs[self.path_lookup_url_kwarg]
+ provider = self.kwargs[self.provider_lookup_url_kwarg]
+
# the 'info' query param, when used on a folder, gives that folder's metadata instead of the metadata of its children
if 'info' in self.request.query_params and path.endswith('/'):
- fobj = self.fetch_from_waterbutler()
-
- if isinstance(fobj, list):
- node = self.get_node(check_object_permissions=False)
- base_class = BaseFileNode.resolve_class(self.kwargs[self.provider_lookup_url_kwarg], BaseFileNode.FOLDER)
- return base_class.objects.filter(
- target_object_id=node.id, target_content_type=ContentType.objects.get_for_model(node), _path=path,
- )
- elif isinstance(fobj, OsfStorageFolder):
- return BaseFileNode.objects.filter(id=fobj.id)
- else:
- raise NotFound
+ resource = self.get_resource()
+ base_class = BaseFileNode.resolve_class(provider, BaseFileNode.FOLDER)
+ queryset = base_class.objects.filter(
+ target_object_id=resource.id,
+ target_content_type=ContentType.objects.get_for_model(resource),
+ _path=path,
+ )
else:
- return self.get_queryset_from_request().distinct()
+ queryset = self.get_queryset_from_request()
+
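+ # Annotate date_modified and show_as_unviewed so the file serializer can read them directly
+ # from the queryset.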
+ return queryset.annotate(
+ date_modified=file_annotations.DATE_MODIFIED,
+ **file_annotations.make_show_as_unviewed_annotations(self.request.user)
+ )
class NodeFileDetail(JSONAPIBaseView, generics.RetrieveAPIView, WaterButlerMixin, NodeMixin):
"""The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/nodes_files_read).
-
"""
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
@@ -1203,11 +1215,19 @@ def get_object(self):
fobj = self.fetch_from_waterbutler()
if isinstance(fobj, dict):
# if dict it is a wb response, not file object yet
- return self.get_file_node_from_wb_resp(fobj)
+ fobj = self.get_file_node_from_wb_resp(fobj)
if isinstance(fobj, list) or not isinstance(fobj, File):
# We should not have gotten a folder here
raise NotFound
+ if fobj.kind == 'file':
+ fobj.show_as_unviewed = file_annotations.check_show_as_unviewed(
+ user=self.request.user, osf_file=fobj,
+ )
+ if fobj.provider == 'osfstorage':
+ fobj.date_modified = fobj.versions.aggregate(Max('created'))['created__max']
+ else:
+ fobj.date_modified = fobj.history[-1]['modified']
return fobj
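
Note: the date_modified resolution introduced in these hunks differs by provider — OSFStorage files derive it from their newest FileVersion, while addon files fall back to the last WaterButler history entry. A minimal sketch of that selection, assuming a file node exposing `provider`, `versions`, and `history` (the helper name is illustrative, not part of the OSF API):

    from django.db.models import Max

    def resolve_date_modified(file_node):
        # Sketch only: OSFStorage files carry FileVersion rows, so the newest
        # version's 'created' timestamp wins; addon files only carry the
        # WaterButler-populated history list of metadata dicts.
        if file_node.provider == 'osfstorage':
            return file_node.versions.aggregate(Max('created'))['created__max']
        history = file_node.history or []
        return history[-1]['modified'] if history else None
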
diff --git a/api/preprints/serializers.py b/api/preprints/serializers.py
index ae17b49570b..2a379d93bed 100644
--- a/api/preprints/serializers.py
+++ b/api/preprints/serializers.py
@@ -3,6 +3,7 @@
from rest_framework import serializers as ser
from rest_framework.fields import empty
from rest_framework.exceptions import ValidationError as DRFValidationError
+from website import settings
from api.base.exceptions import Conflict, JSONAPIException
from api.base.serializers import (
@@ -308,6 +309,13 @@ def update(self, preprint, validated_data):
save_preprint = True
if 'article_doi' in validated_data:
+ doi = settings.DOI_FORMAT.format(prefix=preprint.provider.doi_prefix, guid=preprint._id)
+ if validated_data['article_doi'] == doi:
+ raise exceptions.ValidationError(
+ detail=f'The `article_doi` "{doi}" is already associated with this'
+ ' preprint. Please enter a peer-reviewed publication\'s DOI.',
+ )
+
preprint.article_doi = validated_data['article_doi']
save_preprint = True
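
Note: the serializer change above rejects the DOI that OSF itself minted for the preprint when it is submitted as the `article_doi`. A minimal sketch of the comparison, assuming `DOI_FORMAT` is a simple prefix/guid template (the real template lives in website.settings):

    DOI_FORMAT = '{prefix}/{guid}'  # assumed shape of the settings template

    def is_own_minted_doi(article_doi, provider_doi_prefix, preprint_guid):
        # True when the submitted article_doi is exactly the DOI minted for the
        # preprint itself, which the serializer now rejects with a ValidationError.
        return article_doi == DOI_FORMAT.format(prefix=provider_doi_prefix, guid=preprint_guid)
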
diff --git a/api/preprints/views.py b/api/preprints/views.py
index bbf2fca33f5..e02ab40b6a9 100644
--- a/api/preprints/views.py
+++ b/api/preprints/views.py
@@ -22,7 +22,8 @@
JSONAPIMultipleRelationshipsParser,
JSONAPIMultipleRelationshipsParserForRegularJSON,
)
-from api.base.utils import absolute_reverse, get_user_auth
+
+from api.base.utils import absolute_reverse, get_user_auth, get_object_or_error
from api.base import permissions as base_permissions
from api.citations.utils import render_citation
from api.preprints.serializers import (
@@ -581,8 +582,8 @@ def get_queryset(self):
self.kwargs[self.provider_lookup_url_kwarg] = 'osfstorage'
return super(PreprintFilesList, self).get_queryset()
- def get_resource(self, check_object_permissions):
- return self.get_preprint(check_object_permissions=check_object_permissions)
+ def get_resource(self):
+ return get_object_or_error(Preprint, self.kwargs['preprint_id'], self.request)
class PreprintRequestListCreate(JSONAPIBaseView, generics.ListCreateAPIView, ListFilterMixin, PreprintRequestMixin):
diff --git a/api/registrations/serializers.py b/api/registrations/serializers.py
index e3f2a52d9e4..6d3637c0cba 100644
--- a/api/registrations/serializers.py
+++ b/api/registrations/serializers.py
@@ -32,11 +32,11 @@
HideIfWithdrawalOrWikiDisabled,
)
from framework.auth.core import Auth
-from osf.exceptions import ValidationValueError, NodeStateError
+from osf.exceptions import NodeStateError
from osf.models import Node
from osf.utils.registrations import strip_registered_meta_comments
from osf.utils.workflows import ApprovalStates
-from framework.sentry import log_exception
+
class RegistrationSerializer(NodeSerializer):
admin_only_editable_fields = [
@@ -696,14 +696,6 @@ def create(self, validated_data):
'The following file(s) are attached, but are not part of a component being'
' registered: {}'.format(', '.join(orphan_files_names)))
- try:
- # Still validating metadata, but whether `registration_responses` or `registration_metadata` were populated
- # on the draft, the other field was built and populated as well. Both should exist.
- draft.validate_metadata(metadata=draft.registration_metadata, required_fields=True)
- except ValidationValueError:
- log_exception() # Probably indicates a bug on our end, so log to sentry
- # TODO: Raise an error once our JSON schemas are updated
-
try:
registration = draft.register(auth, save=True, child_ids=children)
except NodeStateError as err:
diff --git a/api_tests/base/test_throttling.py b/api_tests/base/test_throttling.py
index bac8fe40e45..c7ef239a5c5 100644
--- a/api_tests/base/test_throttling.py
+++ b/api_tests/base/test_throttling.py
@@ -1,4 +1,3 @@
-import pytest
import mock
from nose.tools import * # noqa:
@@ -7,20 +6,17 @@
from tests.base import ApiTestCase
from osf_tests.factories import AuthUserFactory, ProjectFactory
-
-pytestmark = pytest.mark.skip(
- 'Unskip when throttling no longer fails on travis'
-)
-
-
class TestDefaultThrottleClasses(ApiTestCase):
@mock.patch('api.base.throttling.BaseThrottle.get_ident')
def test_default_throttle_class_calls(self, mock_base):
+ '''
+ Check DEFAULT_THROTTLE_CLASSES for the throttles being tested.
+ '''
base_url = '/{}nodes/'.format(API_BASE)
res = self.app.get(base_url)
assert_equal(res.status_code, 200)
- assert_equal(mock_base.call_count, 2)
+ assert_equal(mock_base.call_count, 4) # UserRateThrottle get_ident is called twice due to cache key
class TestRootThrottle(ApiTestCase):
@@ -50,24 +46,41 @@ def setUp(self):
self.user = AuthUserFactory()
self.url = '/{}nodes/'.format(API_BASE)
- @mock.patch('rest_framework.throttling.UserRateThrottle.allow_request')
+ @mock.patch('api.base.throttling.UserRateThrottle.allow_request')
def test_user_rate_allow_request_called(self, mock_allow):
res = self.app.get(self.url, auth=self.user.auth)
assert_equal(res.status_code, 200)
assert_equal(mock_allow.call_count, 1)
-class TestNonCookieAuthThrottle(ApiTestCase):
+class TestBurstRateThrottle(ApiTestCase):
def setUp(self):
- super(TestNonCookieAuthThrottle, self).setUp()
- self.url = '/{}nodes/'.format(API_BASE)
+ super().setUp()
+ self.user = AuthUserFactory()
+ self.url = f'/{API_BASE}nodes/'
+
+ @mock.patch('api.base.throttling.BurstRateThrottle.allow_request')
+ def test_user_rate_allow_request_called(self, mock_allow):
+ res = self.app.get(self.url, auth=self.user.auth)
+ assert_equal(res.status_code, 200)
+ assert_equal(mock_allow.call_count, 1)
+
+
+class TestNonCookieAuthThrottle(ApiTestCase):
+ def setUp(self):
+ super().setUp()
+ self.url = f'/{API_BASE}nodes/'
@mock.patch('api.base.throttling.NonCookieAuthThrottle.allow_request')
def test_cookie_throttle_rate_allow_request_called(self, mock_allow):
+ '''
+ Check DEFAULT_THROTTLE_CLASSES for the throttles being tested. NonCookieAuthThrottle is called twice because
+ it is used by two sibling throttle classes.
+ '''
res = self.app.get(self.url)
assert_equal(res.status_code, 200)
- assert_equal(mock_allow.call_count, 1)
+ assert_equal(mock_allow.call_count, 2)
class TestAddContributorEmailThrottle(ApiTestCase):
@@ -123,6 +136,7 @@ def test_add_contrib_throttle_rate_and_default_rates_called(
self, mock_contrib_allow, mock_user_allow, mock_anon_allow):
res = self.app.get(self.public_url, auth=self.user.auth)
assert_equal(res.status_code, 200)
- assert_equal(mock_anon_allow.call_count, 1)
+ # NonCookieAuthThrottle is called twice because it is used by two sibling throttle classes.
+ assert_equal(mock_anon_allow.call_count, 2)
assert_equal(mock_user_allow.call_count, 1)
assert_equal(mock_contrib_allow.call_count, 1)
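
Note: the doubled call counts in these tests come from patching a method that more than one configured throttle class resolves to. A toy reproduction of the effect with plain mock (the classes are placeholders, not the OSF throttle stack):

    import mock

    class FakeBaseThrottle:
        def allow_request(self):
            return True

    class FakeThrottleA(FakeBaseThrottle):
        pass

    class FakeThrottleB(FakeBaseThrottle):
        pass

    with mock.patch.object(FakeBaseThrottle, 'allow_request', return_value=True) as mock_allow:
        # Both subclasses resolve allow_request to the patched base attribute, so a
        # single request that passes through each throttle counts the mock twice.
        FakeThrottleA().allow_request()
        FakeThrottleB().allow_request()

    assert mock_allow.call_count == 2
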
diff --git a/api_tests/draft_nodes/views/test_draft_node_files_lists.py b/api_tests/draft_nodes/views/test_draft_node_files_lists.py
index 555d4a3b649..15216c39f2f 100644
--- a/api_tests/draft_nodes/views/test_draft_node_files_lists.py
+++ b/api_tests/draft_nodes/views/test_draft_node_files_lists.py
@@ -19,6 +19,7 @@
from addons.github.tests.factories import GitHubAccountFactory
from api.base.utils import waterbutler_api_url_for
from api_tests import utils as api_utils
+from website import settings
class TestDraftNodeProvidersList(ApiTestCase):
@@ -71,6 +72,10 @@ def test_returns_provider_data(self):
assert_equal(data['attributes']['provider'], 'osfstorage')
assert_equal(data['attributes']['node'], self.draft_node._id)
assert_equal(data['attributes']['path'], '/')
+ assert_equal(
+ data['relationships']['target']['links']['related']['href'],
+ f'{settings.API_DOMAIN}v2/draft_nodes/{self.draft_node._id}/'
+ )
def test_returns_osfstorage_folder_version_two(self):
res = self.app.get(
diff --git a/api_tests/files/serializers/test_file_serializer.py b/api_tests/files/serializers/test_file_serializer.py
index 5412a5d4ed6..a2e9fff2c58 100644
--- a/api_tests/files/serializers/test_file_serializer.py
+++ b/api_tests/files/serializers/test_file_serializer.py
@@ -70,11 +70,13 @@ def test_file_serializer(self, file_one, node, node_folder):
# test_date_modified_formats_to_old_format
req = make_drf_request_with_version(version='2.0')
+ file_one.date_modified = modified_tz_aware # date_modified is passed into the serializer via view logic
data = FileSerializer(file_one, context={'request': req}).data['data']
- assert modified_tz_aware == data['attributes']['date_modified']
+ assert modified_tz_aware.strftime('%Y-%m-%dT%H:%M:%S.%f') == data['attributes']['date_modified']
# test_date_modified_formats_to_new_format
req = make_drf_request_with_version(version='2.2')
+ file_one.date_modified = modified_tz_aware # date_modified is passed into the serializer via view logic
data = FileSerializer(file_one, context={'request': req}).data['data']
assert datetime.strftime(
modified, new_format
diff --git a/api_tests/files/views/test_file_detail.py b/api_tests/files/views/test_file_detail.py
index 5e075f67a67..163734dc7d5 100644
--- a/api_tests/files/views/test_file_detail.py
+++ b/api_tests/files/views/test_file_detail.py
@@ -10,14 +10,14 @@
from addons.github.models import GithubFileNode
from addons.osfstorage import settings as osfstorage_settings
from addons.osfstorage.listeners import checkin_files_task
+from addons.osfstorage.tests.factories import FileVersionFactory
from api.base.settings.defaults import API_BASE
from api_tests import utils as api_utils
from framework.auth.core import Auth
-<<<<<<< HEAD
-from osf.models import NodeLog, Session
-=======
-from osf.models import NodeLog, Session, QuickFilesNode, Node
->>>>>>> 313e31f680f8b92fc9355a902bfc99773d64bc89
+
+
+from osf.models import NodeLog, Session, Node, FileVersionUserMetadata
+
from osf.utils.permissions import WRITE, READ
from osf.utils.workflows import DefaultStates
from osf_tests.factories import (
@@ -146,7 +146,7 @@ def test_file_guid_created_with_cookie(
assert mock_allow.call_count == 1
def test_get_file(self, app, user, file_url, file):
- res = app.get(file_url, auth=user.auth)
+ res = app.get(f'{file_url}?version=2.2', auth=user.auth)
file.versions.first().reload()
assert res.status_code == 200
assert set(res.json.keys()) == {'meta', 'data'}
@@ -918,3 +918,62 @@ def test_withdrawn_preprint_files(self, app, file_url, preprint, user, other_use
# Admin contrib
res = app.get(file_url, auth=user.auth, expect_errors=True)
assert res.status_code == 403
+
+@pytest.mark.django_db
+class TestShowAsUnviewed:
+
+ @pytest.fixture
+ def node(self, user):
+ return ProjectFactory(creator=user, is_public=True)
+
+ @pytest.fixture
+ def test_file(self, user, node):
+ test_file = api_utils.create_test_file(node, user, create_guid=False)
+ test_file.add_version(FileVersionFactory())
+ return test_file
+
+ @pytest.fixture
+ def url(self, test_file):
+ return f'/{API_BASE}files/{test_file._id}/'
+
+ def test_show_as_unviewed__previously_seen(self, app, user, test_file, url):
+ FileVersionUserMetadata.objects.create(
+ user=user,
+ file_version=test_file.versions.order_by('created').first()
+ )
+
+ res = app.get(url, auth=user.auth)
+ assert res.json['data']['attributes']['show_as_unviewed']
+
+ FileVersionUserMetadata.objects.create(
+ user=user,
+ file_version=test_file.versions.order_by('-created').first()
+ )
+
+ res = app.get(url, auth=user.auth)
+ assert not res.json['data']['attributes']['show_as_unviewed']
+
+ def test_show_as_unviewed__not_previously_seen(self, app, user, test_file, url):
+ res = app.get(url, auth=user.auth)
+ assert not res.json['data']['attributes']['show_as_unviewed']
+
+ def test_show_as_unviewed__different_user(self, app, user, test_file, url):
+ FileVersionUserMetadata.objects.create(
+ user=user,
+ file_version=test_file.versions.order_by('created').first()
+ )
+ file_viewer = AuthUserFactory()
+
+ res = app.get(url, auth=file_viewer.auth)
+ assert not res.json['data']['attributes']['show_as_unviewed']
+
+ def test_show_as_unviewed__anonymous_user(self, app, test_file, url):
+ res = app.get(url)
+ assert not res.json['data']['attributes']['show_as_unviewed']
+
+ def test_show_as_unviewed__no_versions(self, app, user, test_file, url):
+ # Most Non-OSFStorage providers don't have versions; make sure this still works
+ test_file.versions.all().delete()
+
+ res = app.get(url, auth=user.auth)
+ assert not res.json['data']['attributes']['show_as_unviewed']
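
Note: the show_as_unviewed behaviour exercised here is that a file shows as unviewed only when the user has recorded a view (FileVersionUserMetadata) for some earlier version but not for the newest one. A compact sketch of that rule using plain lists (helper name and argument shapes are illustrative):

    def show_as_unviewed(user, seen_version_ids, version_ids_oldest_to_newest):
        # Anonymous users and version-less (non-OSFStorage) files never show as unviewed.
        if user is None or not version_ids_oldest_to_newest:
            return False
        latest = version_ids_oldest_to_newest[-1]
        seen_any = any(v in seen_version_ids for v in version_ids_oldest_to_newest)
        return seen_any and latest not in seen_version_ids
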
diff --git a/api_tests/files/views/test_file_list.py b/api_tests/files/views/test_file_list.py
index fb219dbfd94..6c59a36a218 100644
--- a/api_tests/files/views/test_file_list.py
+++ b/api_tests/files/views/test_file_list.py
@@ -1,4 +1,5 @@
import pytest
+import responses
from api.base.settings.defaults import API_BASE
from api_tests import utils as api_utils
@@ -7,6 +8,9 @@
ProjectFactory,
AuthUserFactory,
)
+from addons.dataverse.tests.factories import DataverseAccountFactory
+from api_tests.draft_nodes.views.test_draft_node_files_lists import prepare_mock_wb_response
+from addons.dataverse.models import DataverseFile
@pytest.fixture()
@@ -17,6 +21,25 @@ def user():
@pytest.mark.django_db
class TestNodeFileList:
+ @pytest.fixture()
+ def dataverse(self, user, node):
+ addon = node.add_addon('dataverse', auth=Auth(user))
+ oauth_settings = DataverseAccountFactory()
+ oauth_settings.save()
+ user.add_addon('dataverse')
+ user.external_accounts.add(oauth_settings)
+ user.save()
+ addon.user_settings = user.get_addon('dataverse')
+ addon.external_account = oauth_settings
+ addon.dataset_doi = 'test dataset_doi'
+ addon.dataset = 'test dataset'
+ addon._dataset_id = 'test dataset_id'
+ addon.save()
+ addon.user_settings.oauth_grants[node._id] = {
+ oauth_settings._id: []}
+ addon.user_settings.save()
+ node.save()
+
@pytest.fixture()
def node(self, user):
return ProjectFactory(creator=user)
@@ -26,6 +49,22 @@ def file(self, user, node):
return api_utils.create_test_file(
node, user, filename='file_one')
+ @pytest.fixture()
+ def dataverse_published_filenode(self, node):
+ return DataverseFile.objects.create(
+ target=node,
+ path='/testpath',
+ _history=[{'extra': {'datasetVersion': 'latest-published'}}],
+ )
+
+ @pytest.fixture()
+ def dataverse_draft_filenode(self, node):
+ return DataverseFile.objects.create(
+ target=node,
+ path='/testpath',
+ _history=[{'extra': {'datasetVersion': 'latest'}}],
+ )
+
@pytest.fixture()
def deleted_file(self, user, node):
deleted_file = api_utils.create_test_file(
@@ -42,6 +81,119 @@ def test_does_not_return_trashed_files(
data = res.json.get('data')
assert len(data) == 1
+ @responses.activate
+ def test_disambiguate_dataverse_paths_initial(self, app, user, node, dataverse):
+ '''
+ This test covers the initial retrieval of files from Dataverse (OSF contacting Dataverse after an update to
+ its Dataverse files). It ensures both files are made into OSF filenodes and that their `extra` info is passed
+ along to the front-end.
+ '''
+ prepare_mock_wb_response(
+ path='/',
+ node=node,
+ provider='dataverse',
+ files=[
+ {
+ 'name': 'testpath',
+ 'path': '/testpath',
+ 'materialized': '/testpath',
+ 'kind': 'file',
+ 'modified': 'Wed, 20 Jul 2011 22:04:50 +0000',
+ 'extra': {
+ 'datasetVersion': 'latest'
+ },
+ 'provider': 'dataverse'
+ },
+ {
+ 'name': 'testpath',
+ 'path': '/testpath',
+ 'materialized': '/testpath',
+ 'kind': 'file',
+ 'modified': 'Wed, 20 Jul 2011 22:04:50 +0000',
+ 'extra': {
+ 'datasetVersion': 'latest-published'
+ },
+ 'provider': 'dataverse'
+ },
+ ]
+ )
+ res = app.get(
+ f'/{API_BASE}nodes/{node._id}/files/dataverse/?sort=date_modified',
+ auth=node.creator.auth
+ )
+ data = res.json['data']
+ assert len(data) == 2
+ assert data[0]['attributes']['extra'] == {
+ 'datasetVersion': 'latest',
+ 'hashes': {
+ 'md5': None,
+ 'sha256': None
+ }
+ }
+ assert data[1]['attributes']['extra'] == {
+ 'datasetVersion': 'latest-published',
+ 'hashes': {
+ 'md5': None,
+ 'sha256': None
+ }
+ }
+
+ @responses.activate
+ def test_disambiguate_dataverse_paths_retrieve(self, app, user, node, dataverse, dataverse_draft_filenode, dataverse_published_filenode):
+ '''
+ This test covers retrieving files from Dataverse and disambiguating their corresponding OSF filenodes,
+ ensuring their `extra` info is passed along to the front-end. WaterButler must also be mocked here, otherwise
+ OSF will assume the files are gone.
+ '''
+ prepare_mock_wb_response(
+ path='/',
+ node=node,
+ provider='dataverse',
+ files=[
+ {
+ 'name': 'testpath',
+ 'path': '/testpath',
+ 'materialized': '/testpath',
+ 'kind': 'file',
+
+ 'extra': {
+ 'datasetVersion': 'latest',
+ },
+ 'provider': 'dataverse',
+ },
+ {
+ 'name': 'testpath',
+ 'path': '/testpath',
+ 'materialized': '/testpath',
+ 'kind': 'file',
+ 'extra': {
+ 'datasetVersion': 'latest-published',
+ },
+ 'provider': 'dataverse',
+ },
+ ]
+ )
+ res = app.get(
+ f'/{API_BASE}nodes/{node._id}/files/dataverse/?sort=date_modified',
+ auth=node.creator.auth
+ )
+ data = res.json['data']
+ assert len(data) == 2
+ assert data[0]['attributes']['extra'] == {
+ 'datasetVersion': 'latest',
+ 'hashes': {
+ 'md5': None,
+ 'sha256': None
+ }
+ }
+ assert data[1]['attributes']['extra'] == {
+ 'datasetVersion': 'latest-published',
+ 'hashes': {
+ 'md5': None,
+ 'sha256': None
+ }
+ }
+
@pytest.mark.django_db
class TestFileFiltering:
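
Note: background for the two Dataverse tests above — Dataverse can report the same path twice, once for the draft ('latest') and once for the published ('latest-published') dataset version, so path alone no longer identifies a file node. A rough sketch of matching a WaterButler entry to a stored file node by that extra field (shapes are illustrative; the real resolution lives in the Dataverse addon's file model):

    def match_wb_entry_to_filenode(wb_entry, candidate_filenodes):
        # Pick the stored node whose last recorded datasetVersion matches the one
        # WaterButler reports for the same path; return None if nothing matches.
        wanted = wb_entry.get('extra', {}).get('datasetVersion')
        for node in candidate_filenodes:
            history = node._history or []
            recorded = history[-1].get('extra', {}).get('datasetVersion') if history else None
            if recorded == wanted:
                return node
        return None
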
diff --git a/api_tests/institutions/views/test_institution_auth.py b/api_tests/institutions/views/test_institution_auth.py
index 28ff5928ec1..ca1c4c951c5 100644
--- a/api_tests/institutions/views/test_institution_auth.py
+++ b/api_tests/institutions/views/test_institution_auth.py
@@ -14,6 +14,7 @@
from framework.auth.views import send_confirm_email
from osf.models import OSFUser
+from osf.models.institution import SharedSsoAffiliationFilterCriteriaAction
from osf_tests.factories import InstitutionFactory, ProjectFactory, UserFactory
from tests.base import capture_signals
@@ -31,6 +32,7 @@ def make_payload(
family_name='',
department='',
is_member_of='',
+ user_roles='',
selective_sso_filter='',
):
@@ -46,6 +48,7 @@ def make_payload(
'username': username,
'department': department,
'isMemberOf': is_member_of,
+ 'userRoles': user_roles,
'selectiveSsoFilter': selective_sso_filter,
}
}
@@ -70,7 +73,7 @@ def institution():
@pytest.fixture()
-def institution_primary():
+def institution_primary_type_1():
institution = InstitutionFactory()
institution._id = 'brown'
institution.save()
@@ -78,13 +81,29 @@ def institution_primary():
@pytest.fixture()
-def institution_secondary():
+def institution_secondary_type_1():
institution = InstitutionFactory()
institution._id = 'thepolicylab'
institution.save()
return institution
+@pytest.fixture()
+def institution_primary_type_2():
+ institution = InstitutionFactory()
+ institution._id = 'fsu'
+ institution.save()
+ return institution
+
+
+@pytest.fixture()
+def institution_secondary_type_2():
+ institution = InstitutionFactory()
+ institution._id = 'nationalmaglab'
+ institution.save()
+ return institution
+
+
@pytest.fixture()
def institution_selective():
institution = InstitutionFactory()
@@ -98,6 +117,18 @@ def url_auth_institution():
return '/{0}institutions/auth/'.format(API_BASE)
+@pytest.fixture()
+def type_2_eligible_user_roles():
+ return 'FSU_IAM_AD_MGMT;FSU_MYFSUADMIN;CS_ADMN_STDT_CNT;FSU_IAM_REG;FSU_OSF_MAGLAB;' \
+ 'FSU_FULL_IAM_LOOKUP;FSU_OB_Related_Content_FDA;FSU_OB_FI_EVERYONE;FSU_MS_LIC_FULL'
+
+
+@pytest.fixture()
+def type_2_ineligible_user_roles():
+ return 'IT_Professional;FSU_IAM_AD_MGMT;FSU_MYFSUADMIN;CS_ADMN_STDT_CNT;FSU_IAM_REG;' \
+ 'FSU_FULL_IAM_LOOKUP;FSU_OB_Related_Content_FDA;FSU_OB_FI_EVERYONE;FSU_MS_LIC_FULL'
+
+
@pytest.mark.django_db
class TestInstitutionAuth:
@@ -408,16 +439,270 @@ def test_user_external_unconfirmed(self, app, institution, url_auth_institution)
@pytest.mark.django_db
-class TestInstitutionAuthnSharedSSO:
+class TestInstitutionAuthnSharedSSOCriteriaType2:
+
+ def test_new_user_primary_only(self, app, url_auth_institution, type_2_ineligible_user_roles,
+ institution_primary_type_2, institution_secondary_type_2):
+
+ username = 'user_created@osf.edu'
+ assert OSFUser.objects.filter(username=username).count() == 0
+
+ with capture_signals() as mock_signals:
+ res = app.post(
+ url_auth_institution,
+ make_payload(institution_primary_type_2, username, user_roles=type_2_ineligible_user_roles)
+ )
+ assert res.status_code == 204
+ assert mock_signals.signals_sent() == set([signals.user_confirmed])
+
+ user = OSFUser.objects.filter(username=username).first()
+ assert user
+ assert user.fullname == 'Fake User'
+ assert user.accepted_terms_of_service is None
+ assert institution_primary_type_2 in user.affiliated_institutions.all()
+ assert institution_secondary_type_2 not in user.affiliated_institutions.all()
+
+ def test_new_user_primary_and_secondary(self, app, url_auth_institution, type_2_eligible_user_roles,
+ institution_primary_type_2, institution_secondary_type_2):
+
+ username = 'user_created@osf.edu'
+ assert OSFUser.objects.filter(username=username).count() == 0
+
+ with capture_signals() as mock_signals:
+ res = app.post(
+ url_auth_institution,
+ make_payload(institution_primary_type_2, username, user_roles=type_2_eligible_user_roles)
+ )
+ assert res.status_code == 204
+ assert mock_signals.signals_sent() == set([signals.user_confirmed])
+
+ user = OSFUser.objects.filter(username=username).first()
+ assert user
+ assert user.fullname == 'Fake User'
+ assert user.accepted_terms_of_service is None
+ assert institution_primary_type_2 in user.affiliated_institutions.all()
+ assert institution_secondary_type_2 in user.affiliated_institutions.all()
+
+ def test_existing_user_primary_only_not_affiliated(self, app, url_auth_institution, type_2_ineligible_user_roles,
+ institution_primary_type_2, institution_secondary_type_2):
+ username = 'user_not_affiliated@primary.edu'
+ user = make_user(username, 'Foo Bar')
+ user.save()
+ number_of_affiliations = user.affiliated_institutions.count()
+
+ with capture_signals() as mock_signals:
+ res = app.post(
+ url_auth_institution,
+ make_payload(institution_primary_type_2, username, user_roles=type_2_ineligible_user_roles)
+ )
+ assert res.status_code == 204
+ assert not mock_signals.signals_sent()
+
+ user.reload()
+ assert user.fullname == 'Foo Bar'
+ assert user.affiliated_institutions.count() == number_of_affiliations + 1
+ assert institution_primary_type_2 in user.affiliated_institutions.all()
+ assert institution_secondary_type_2 not in user.affiliated_institutions.all()
+
+ def test_existing_user_primary_only_affiliated(self, app, url_auth_institution, type_2_ineligible_user_roles,
+ institution_primary_type_2, institution_secondary_type_2):
+ username = 'user_affiliated@primary.edu'
+ user = make_user(username, 'Foo Bar')
+ user.affiliated_institutions.add(institution_primary_type_2)
+ user.save()
+ number_of_affiliations = user.affiliated_institutions.count()
+
+ with capture_signals() as mock_signals:
+ res = app.post(
+ url_auth_institution,
+ make_payload(institution_primary_type_2, username, user_roles=type_2_ineligible_user_roles)
+ )
+ assert res.status_code == 204
+ assert not mock_signals.signals_sent()
+
+ user.reload()
+ assert user.fullname == 'Foo Bar'
+ assert user.affiliated_institutions.count() == number_of_affiliations
+ assert institution_primary_type_2 in user.affiliated_institutions.all()
+ assert institution_secondary_type_2 not in user.affiliated_institutions.all()
+
+ def test_existing_user_both_not_affiliated(self, app, url_auth_institution, type_2_eligible_user_roles,
+ institution_primary_type_2, institution_secondary_type_2):
+
+ username = 'user_both_not_affiliated@primary.edu'
+ user = make_user(username, 'Foo Bar')
+ user.save()
+ number_of_affiliations = user.affiliated_institutions.count()
+
+ with capture_signals() as mock_signals:
+ res = app.post(
+ url_auth_institution,
+ make_payload(institution_primary_type_2, username, user_roles=type_2_eligible_user_roles)
+ )
+ assert res.status_code == 204
+ assert not mock_signals.signals_sent()
+
+ user.reload()
+ assert user.fullname == 'Foo Bar'
+ assert user.affiliated_institutions.count() == number_of_affiliations + 2
+ assert institution_primary_type_2 in user.affiliated_institutions.all()
+ assert institution_secondary_type_2 in user.affiliated_institutions.all()
+
+ def test_existing_user_both_affiliated(self, app, url_auth_institution, type_2_eligible_user_roles,
+ institution_primary_type_2, institution_secondary_type_2):
+
+ username = 'user_both_affiliated@primary.edu'
+ user = make_user(username, 'Foo Bar')
+ user.affiliated_institutions.add(institution_primary_type_2)
+ user.affiliated_institutions.add(institution_secondary_type_2)
+ user.save()
+ number_of_affiliations = user.affiliated_institutions.count()
+
+ with capture_signals() as mock_signals:
+ res = app.post(
+ url_auth_institution,
+ make_payload(institution_primary_type_2, username, user_roles=type_2_eligible_user_roles)
+ )
+ assert res.status_code == 204
+ assert not mock_signals.signals_sent()
+
+ user.reload()
+ assert user.fullname == 'Foo Bar'
+ assert user.affiliated_institutions.count() == number_of_affiliations
+ assert institution_primary_type_2 in user.affiliated_institutions.all()
+ assert institution_secondary_type_2 in user.affiliated_institutions.all()
+
+ def test_existing_user_secondary_not_affiliated(self, app, url_auth_institution, type_2_eligible_user_roles,
+ institution_primary_type_2, institution_secondary_type_2):
+
+ username = 'user_secondary_not@primary.edu'
+ user = make_user(username, 'Foo Bar')
+ user.affiliated_institutions.add(institution_primary_type_2)
+ user.save()
+ number_of_affiliations = user.affiliated_institutions.count()
+
+ with capture_signals() as mock_signals:
+ res = app.post(
+ url_auth_institution,
+ make_payload(institution_primary_type_2, username, user_roles=type_2_eligible_user_roles)
+ )
+ assert res.status_code == 204
+ assert not mock_signals.signals_sent()
+
+ user.reload()
+ assert user.fullname == 'Foo Bar'
+ assert user.affiliated_institutions.count() == number_of_affiliations + 1
+ assert institution_primary_type_2 in user.affiliated_institutions.all()
+ assert institution_secondary_type_2 in user.affiliated_institutions.all()
+
+ def test_invalid_criteria_action(self, app, url_auth_institution, type_2_eligible_user_roles,
+ institution_primary_type_2, institution_secondary_type_2):
+
+ INSTITUTION_SHARED_SSO_MAP.update({
+ 'fsu': {
+ 'attribute_name': 'userRoles',
+ 'criteria_action': 'invalid_criteria_action',
+ 'criteria_value': 'FSU_OSF_MAGLAB',
+ 'institution_id': 'nationalmaglab',
+ },
+ })
+
+ username = 'user_invalid_criteria_action@primary.edu'
+ user = make_user(username, 'Foo Bar')
+ user.affiliated_institutions.add(institution_primary_type_2)
+ user.save()
+ number_of_affiliations = user.affiliated_institutions.count()
+
+ with capture_signals() as mock_signals:
+ res = app.post(
+ url_auth_institution,
+ make_payload(institution_primary_type_2, username, user_roles=type_2_eligible_user_roles)
+ )
+ assert res.status_code == 204
+ assert not mock_signals.signals_sent()
+
+ user.reload()
+ assert user.fullname == 'Foo Bar'
+ assert user.affiliated_institutions.count() == number_of_affiliations
+ assert institution_primary_type_2 in user.affiliated_institutions.all()
+ assert institution_secondary_type_2 not in user.affiliated_institutions.all()
+
+ def test_invalid_institution_id(self, app, url_auth_institution, type_2_eligible_user_roles,
+ institution_primary_type_2, institution_secondary_type_2):
+
+ INSTITUTION_SHARED_SSO_MAP.update({
+ 'fsu': {
+ 'attribute_name': 'userRoles',
+ 'criteria_action': SharedSsoAffiliationFilterCriteriaAction.CONTAINS.value,
+ 'criteria_value': 'FSU_OSF_MAGLAB',
+ 'institution_id': 'invalid_institution_id',
+ },
+ })
+
+ username = 'user_invalid_institution_id@primary.edu'
+ user = make_user(username, 'Foo Bar')
+ user.affiliated_institutions.add(institution_primary_type_2)
+ user.save()
+ number_of_affiliations = user.affiliated_institutions.count()
+
+ with capture_signals() as mock_signals:
+ res = app.post(
+ url_auth_institution,
+ make_payload(institution_primary_type_2, username, user_roles=type_2_eligible_user_roles)
+ )
+ assert res.status_code == 204
+ assert not mock_signals.signals_sent()
+
+ user.reload()
+ assert user.fullname == 'Foo Bar'
+ assert user.affiliated_institutions.count() == number_of_affiliations
+ assert institution_primary_type_2 in user.affiliated_institutions.all()
+ assert institution_secondary_type_2 not in user.affiliated_institutions.all()
+
+ def test_empty_criteria_value(self, app, url_auth_institution,
+ institution_primary_type_2, institution_secondary_type_2):
+
+ INSTITUTION_SHARED_SSO_MAP.update({
+ 'fsu': {
+ 'attribute_name': 'userRoles',
+ 'criteria_action': SharedSsoAffiliationFilterCriteriaAction.CONTAINS.value,
+ 'criteria_value': 'FSU_OSF_MAGLAB',
+ 'institution_id': 'nationalmaglab',
+ },
+ })
+
+ username = 'user_invalid_criteria_value@primary.edu'
+ user = make_user(username, 'Foo Bar')
+ user.affiliated_institutions.add(institution_primary_type_2)
+ user.save()
+ number_of_affiliations = user.affiliated_institutions.count()
+
+ with capture_signals() as mock_signals:
+ res = app.post(
+ url_auth_institution,
+ make_payload(institution_primary_type_2, username, user_roles='')
+ )
+ assert res.status_code == 204
+ assert not mock_signals.signals_sent()
+
+ user.reload()
+ assert user.fullname == 'Foo Bar'
+ assert user.affiliated_institutions.count() == number_of_affiliations
+ assert institution_primary_type_2 in user.affiliated_institutions.all()
+ assert institution_secondary_type_2 not in user.affiliated_institutions.all()
+
+
+@pytest.mark.django_db
+class TestInstitutionAuthnSharedSSOCriteriaType1:
def test_new_user_primary_only(self, app, url_auth_institution,
- institution_primary, institution_secondary):
+ institution_primary_type_1, institution_secondary_type_1):
username = 'user_created@osf.edu'
assert OSFUser.objects.filter(username=username).count() == 0
with capture_signals() as mock_signals:
- res = app.post(url_auth_institution, make_payload(institution_primary, username))
+ res = app.post(url_auth_institution, make_payload(institution_primary_type_1, username))
assert res.status_code == 204
assert mock_signals.signals_sent() == set([signals.user_confirmed])
@@ -425,18 +710,20 @@ def test_new_user_primary_only(self, app, url_auth_institution,
assert user
assert user.fullname == 'Fake User'
assert user.accepted_terms_of_service is None
- assert institution_primary in user.affiliated_institutions.all()
- assert institution_secondary not in user.affiliated_institutions.all()
+ assert institution_primary_type_1 in user.affiliated_institutions.all()
+ assert institution_secondary_type_1 not in user.affiliated_institutions.all()
def test_new_user_primary_and_secondary(self, app, url_auth_institution,
- institution_primary, institution_secondary):
+ institution_primary_type_1, institution_secondary_type_1):
username = 'user_created@osf.edu'
assert OSFUser.objects.filter(username=username).count() == 0
with capture_signals() as mock_signals:
- res = app.post(url_auth_institution,
- make_payload(institution_primary, username, is_member_of='thepolicylab'))
+ res = app.post(
+ url_auth_institution,
+ make_payload(institution_primary_type_1, username, is_member_of='thepolicylab')
+ )
assert res.status_code == 204
assert mock_signals.signals_sent() == set([signals.user_confirmed])
@@ -444,48 +731,48 @@ def test_new_user_primary_and_secondary(self, app, url_auth_institution,
assert user
assert user.fullname == 'Fake User'
assert user.accepted_terms_of_service is None
- assert institution_primary in user.affiliated_institutions.all()
- assert institution_secondary in user.affiliated_institutions.all()
+ assert institution_primary_type_1 in user.affiliated_institutions.all()
+ assert institution_secondary_type_1 in user.affiliated_institutions.all()
def test_existing_user_primary_only_not_affiliated(self, app, url_auth_institution,
- institution_primary, institution_secondary):
+ institution_primary_type_1, institution_secondary_type_1):
username = 'user_not_affiliated@primary.edu'
user = make_user(username, 'Foo Bar')
user.save()
number_of_affiliations = user.affiliated_institutions.count()
with capture_signals() as mock_signals:
- res = app.post(url_auth_institution, make_payload(institution_primary, username))
+ res = app.post(url_auth_institution, make_payload(institution_primary_type_1, username))
assert res.status_code == 204
assert not mock_signals.signals_sent()
user.reload()
assert user.fullname == 'Foo Bar'
assert user.affiliated_institutions.count() == number_of_affiliations + 1
- assert institution_primary in user.affiliated_institutions.all()
- assert institution_secondary not in user.affiliated_institutions.all()
+ assert institution_primary_type_1 in user.affiliated_institutions.all()
+ assert institution_secondary_type_1 not in user.affiliated_institutions.all()
def test_existing_user_primary_only_affiliated(self, app, url_auth_institution,
- institution_primary, institution_secondary):
+ institution_primary_type_1, institution_secondary_type_1):
username = 'user_affiliated@primary.edu'
user = make_user(username, 'Foo Bar')
- user.affiliated_institutions.add(institution_primary)
+ user.affiliated_institutions.add(institution_primary_type_1)
user.save()
number_of_affiliations = user.affiliated_institutions.count()
with capture_signals() as mock_signals:
- res = app.post(url_auth_institution, make_payload(institution_primary, username))
+ res = app.post(url_auth_institution, make_payload(institution_primary_type_1, username))
assert res.status_code == 204
assert not mock_signals.signals_sent()
user.reload()
assert user.fullname == 'Foo Bar'
assert user.affiliated_institutions.count() == number_of_affiliations
- assert institution_primary in user.affiliated_institutions.all()
- assert institution_secondary not in user.affiliated_institutions.all()
+ assert institution_primary_type_1 in user.affiliated_institutions.all()
+ assert institution_secondary_type_1 not in user.affiliated_institutions.all()
def test_existing_user_both_not_affiliated(self, app, url_auth_institution,
- institution_primary, institution_secondary):
+ institution_primary_type_1, institution_secondary_type_1):
username = 'user_both_not_affiliated@primary.edu'
user = make_user(username, 'Foo Bar')
@@ -493,151 +780,161 @@ def test_existing_user_both_not_affiliated(self, app, url_auth_institution,
number_of_affiliations = user.affiliated_institutions.count()
with capture_signals() as mock_signals:
- res = app.post(url_auth_institution,
- make_payload(institution_primary, username, is_member_of='thepolicylab'))
+ res = app.post(
+ url_auth_institution,
+ make_payload(institution_primary_type_1, username, is_member_of='thepolicylab')
+ )
assert res.status_code == 204
assert not mock_signals.signals_sent()
user.reload()
assert user.fullname == 'Foo Bar'
assert user.affiliated_institutions.count() == number_of_affiliations + 2
- assert institution_primary in user.affiliated_institutions.all()
- assert institution_secondary in user.affiliated_institutions.all()
+ assert institution_primary_type_1 in user.affiliated_institutions.all()
+ assert institution_secondary_type_1 in user.affiliated_institutions.all()
def test_existing_user_both_affiliated(self, app, url_auth_institution,
- institution_primary, institution_secondary):
+ institution_primary_type_1, institution_secondary_type_1):
username = 'user_both_affiliated@primary.edu'
user = make_user(username, 'Foo Bar')
- user.affiliated_institutions.add(institution_primary)
- user.affiliated_institutions.add(institution_secondary)
+ user.affiliated_institutions.add(institution_primary_type_1)
+ user.affiliated_institutions.add(institution_secondary_type_1)
user.save()
number_of_affiliations = user.affiliated_institutions.count()
with capture_signals() as mock_signals:
- res = app.post(url_auth_institution,
- make_payload(institution_primary, username, is_member_of='thepolicylab'))
+ res = app.post(
+ url_auth_institution,
+ make_payload(institution_primary_type_1, username, is_member_of='thepolicylab')
+ )
assert res.status_code == 204
assert not mock_signals.signals_sent()
user.reload()
assert user.fullname == 'Foo Bar'
assert user.affiliated_institutions.count() == number_of_affiliations
- assert institution_primary in user.affiliated_institutions.all()
- assert institution_secondary in user.affiliated_institutions.all()
+ assert institution_primary_type_1 in user.affiliated_institutions.all()
+ assert institution_secondary_type_1 in user.affiliated_institutions.all()
def test_existing_user_secondary_not_affiliated(self, app, url_auth_institution,
- institution_primary, institution_secondary):
+ institution_primary_type_1, institution_secondary_type_1):
username = 'user_secondary_not@primary.edu'
user = make_user(username, 'Foo Bar')
- user.affiliated_institutions.add(institution_primary)
+ user.affiliated_institutions.add(institution_primary_type_1)
user.save()
number_of_affiliations = user.affiliated_institutions.count()
with capture_signals() as mock_signals:
- res = app.post(url_auth_institution,
- make_payload(institution_primary, username, is_member_of='thepolicylab'))
+ res = app.post(
+ url_auth_institution,
+ make_payload(institution_primary_type_1, username, is_member_of='thepolicylab')
+ )
assert res.status_code == 204
assert not mock_signals.signals_sent()
user.reload()
assert user.fullname == 'Foo Bar'
assert user.affiliated_institutions.count() == number_of_affiliations + 1
- assert institution_primary in user.affiliated_institutions.all()
- assert institution_secondary in user.affiliated_institutions.all()
+ assert institution_primary_type_1 in user.affiliated_institutions.all()
+ assert institution_secondary_type_1 in user.affiliated_institutions.all()
- def test_invalid_criteria_type(self, app, url_auth_institution,
- institution_primary, institution_secondary):
+ def test_invalid_criteria_action(self, app, url_auth_institution,
+ institution_primary_type_1, institution_secondary_type_1):
INSTITUTION_SHARED_SSO_MAP.update({
'brown': {
- 'criteria': 'emailDomain',
- 'institutions': {
- 'policylab.io': 'thepolicylab',
- }
+ 'attribute_name': 'isMemberOf',
+ 'criteria_action': 'invalid_criteria_action',
+ 'criteria_value': 'thepolicylab',
+ 'institution_id': 'thepolicylab',
},
})
- username = 'user_invalid_criteria@primary.edu'
+ username = 'user_invalid_criteria_action@primary.edu'
user = make_user(username, 'Foo Bar')
- user.affiliated_institutions.add(institution_primary)
+ user.affiliated_institutions.add(institution_primary_type_1)
user.save()
number_of_affiliations = user.affiliated_institutions.count()
with capture_signals() as mock_signals:
- res = app.post(url_auth_institution,
- make_payload(institution_primary, username, is_member_of='thepolicylab'))
+ res = app.post(
+ url_auth_institution,
+ make_payload(institution_primary_type_1, username, is_member_of='thepolicylab')
+ )
assert res.status_code == 204
assert not mock_signals.signals_sent()
user.reload()
assert user.fullname == 'Foo Bar'
assert user.affiliated_institutions.count() == number_of_affiliations
- assert institution_primary in user.affiliated_institutions.all()
- assert institution_secondary not in user.affiliated_institutions.all()
+ assert institution_primary_type_1 in user.affiliated_institutions.all()
+ assert institution_secondary_type_1 not in user.affiliated_institutions.all()
- def test_invalid_secondary_institution(self, app, url_auth_institution,
- institution_primary, institution_secondary):
+ def test_invalid_institution_id(self, app, url_auth_institution,
+ institution_primary_type_1, institution_secondary_type_1):
INSTITUTION_SHARED_SSO_MAP.update({
'brown': {
- 'criteria': 'attribute',
- 'attribute': 'isMemberOf',
- 'institutions': {
- 'thepolicylab': 'brownlab',
- }
+ 'attribute_name': 'isMemberOf',
+ 'criteria_action': SharedSsoAffiliationFilterCriteriaAction.EQUALS_TO.value,
+ 'criteria_value': 'thepolicylab',
+ 'institution_id': 'invalid_institution_id',
},
})
- username = 'user_invalid_criteria@primary.edu'
+ username = 'user_invalid_institution_id@primary.edu'
user = make_user(username, 'Foo Bar')
- user.affiliated_institutions.add(institution_primary)
+ user.affiliated_institutions.add(institution_primary_type_1)
user.save()
number_of_affiliations = user.affiliated_institutions.count()
with capture_signals() as mock_signals:
- res = app.post(url_auth_institution,
- make_payload(institution_primary, username, is_member_of='thepolicylab'))
+ res = app.post(
+ url_auth_institution,
+ make_payload(institution_primary_type_1, username, is_member_of='thepolicylab')
+ )
assert res.status_code == 204
assert not mock_signals.signals_sent()
user.reload()
assert user.fullname == 'Foo Bar'
assert user.affiliated_institutions.count() == number_of_affiliations
- assert institution_primary in user.affiliated_institutions.all()
- assert institution_secondary not in user.affiliated_institutions.all()
+ assert institution_primary_type_1 in user.affiliated_institutions.all()
+ assert institution_secondary_type_1 not in user.affiliated_institutions.all()
- def test_empty_secondary_institution(self, app, url_auth_institution,
- institution_primary, institution_secondary):
+ def test_invalid_criteria_value(self, app, url_auth_institution,
+ institution_primary_type_1, institution_secondary_type_1):
INSTITUTION_SHARED_SSO_MAP.update({
'brown': {
- 'criteria': 'attribute',
- 'attribute': 'isMemberOf',
- 'institutions': {
- 'thepolicylab': 'thepolicylab',
- }
+ 'attribute_name': 'isMemberOf',
+ 'criteria_action': SharedSsoAffiliationFilterCriteriaAction.EQUALS_TO.value,
+ 'criteria_value': 'thepolicylab',
+ 'institution_id': 'thepolicylab',
},
})
- username = 'user_invalid_criteria@primary.edu'
+ username = 'user_invalid_criteria_value@primary.edu'
user = make_user(username, 'Foo Bar')
- user.affiliated_institutions.add(institution_primary)
+ user.affiliated_institutions.add(institution_primary_type_1)
user.save()
number_of_affiliations = user.affiliated_institutions.count()
with capture_signals() as mock_signals:
- res = app.post(url_auth_institution,
- make_payload(institution_primary, username, is_member_of='brownlab'))
+ res = app.post(
+ url_auth_institution,
+ make_payload(institution_primary_type_1, username, is_member_of='invalid_criteria_value')
+ )
assert res.status_code == 204
assert not mock_signals.signals_sent()
user.reload()
assert user.fullname == 'Foo Bar'
assert user.affiliated_institutions.count() == number_of_affiliations
- assert institution_primary in user.affiliated_institutions.all()
- assert institution_secondary not in user.affiliated_institutions.all()
+ assert institution_primary_type_1 in user.affiliated_institutions.all()
+ assert institution_secondary_type_1 not in user.affiliated_institutions.all()
@pytest.mark.django_db
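
Note: the type-2 entries in INSTITUTION_SHARED_SSO_MAP gate the secondary affiliation on a delimited attribute (userRoles) rather than an exact isMemberOf value. A hedged sketch of how such a criteria check might be evaluated; the action strings below are placeholders for the SharedSsoAffiliationFilterCriteriaAction enum values, whose definitions are not part of this diff:

    def secondary_institution_id(sso_map, primary_id, sso_attributes):
        # Map entries mirror the test fixtures: attribute_name, criteria_action,
        # criteria_value and institution_id. Unknown actions or empty attribute
        # values simply skip the secondary affiliation.
        entry = sso_map.get(primary_id)
        if not entry:
            return None
        value = sso_attributes.get(entry['attribute_name'], '')
        if entry['criteria_action'] == 'contains':        # placeholder for CONTAINS
            eligible = entry['criteria_value'] in value.split(';')
        elif entry['criteria_action'] == 'equals_to':     # placeholder for EQUALS_TO
            eligible = value == entry['criteria_value']
        else:
            eligible = False
        return entry['institution_id'] if eligible else None
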
diff --git a/api_tests/nodes/views/test_node_detail.py b/api_tests/nodes/views/test_node_detail.py
index 83bc3e80ce5..1efa4c5a060 100644
--- a/api_tests/nodes/views/test_node_detail.py
+++ b/api_tests/nodes/views/test_node_detail.py
@@ -631,6 +631,16 @@ def test_current_user_permissions(self, app, user, url_public, project_public, u
assert res.json['data']['attributes']['current_user_is_contributor_or_group_member'] is False
assert res.json['data']['attributes']['current_user_is_contributor'] is False
+ def test_current_user_permissions_vol(self, app, user, url_public, project_public):
+ '''
+ Users including view-only link query params should get ONLY read permissions, even if they are admins etc.
+ '''
+ private_link = PrivateLinkFactory(anonymous=False)
+ private_link.nodes.add(project_public)
+ private_link.save()
+ res = app.get(f'{url_public}?view_only={private_link.key}', auth=user.auth)
+ assert [permissions.READ] == res.json['data']['attributes']['current_user_permissions']
+
@pytest.mark.django_db
class NodeCRUDTestCase:
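
Note: the view-only-link test above pins down that a view_only query parameter always downgrades the advertised permissions to read-only, regardless of the requester's role. A one-line sketch of that rule (function name and permission string are illustrative):

    def effective_permissions(role_permissions, request_has_view_only_key):
        # With a view-only key the response advertises read access only, even for admins.
        return ['read'] if request_has_view_only_key else role_permissions
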
diff --git a/api_tests/nodes/views/test_node_files_list.py b/api_tests/nodes/views/test_node_files_list.py
index b79c3c33c37..352787045b8 100644
--- a/api_tests/nodes/views/test_node_files_list.py
+++ b/api_tests/nodes/views/test_node_files_list.py
@@ -10,10 +10,12 @@
from addons.github.models import GithubFolder
from addons.github.tests.factories import GitHubAccountFactory
+from addons.osfstorage.tests.factories import FileVersionFactory
from api.base.settings.defaults import API_BASE
from api.base.utils import waterbutler_api_url_for
from api_tests import utils as api_utils
from tests.base import ApiTestCase
+from osf.models.files import FileVersionUserMetadata
from osf_tests.factories import (
ProjectFactory,
AuthUserFactory,
@@ -21,6 +23,8 @@
PrivateLinkFactory
)
from osf.utils.permissions import READ
+from dateutil.parser import parse as parse_date
+from website import settings
def prepare_mock_wb_response(
@@ -614,15 +618,21 @@ def add_github(self):
oauth_settings._id: []}
addon.user_settings.save()
- def check_file_order(self, resp):
- previous_file_name = 0
- for file in resp.json['data']:
- int_file_name = int(file['attributes']['name'])
- assert int_file_name > previous_file_name, 'Files were not in order'
- previous_file_name = int_file_name
+ def check_file_order(self, resp, attribute, key, ascending=False):
+ files = resp.json['data']
+ if ascending:
+ files.reverse()
+
+ previous_file_field_value = None
+ for file in files:
+ if file['attributes'][attribute] is not None:
+ file_field_value = key(file['attributes'][attribute])
+ if previous_file_field_value:
+ assert file_field_value > previous_file_field_value, 'Files were not in order'
+ previous_file_field_value = file_field_value
@responses.activate
- def test_node_files_are_sorted_correctly(self):
+ def test_node_files_are_sorted_correctly_name(self):
prepare_mock_wb_response(
node=self.project, provider='github',
files=[
@@ -656,7 +666,32 @@ def test_node_files_are_sorted_correctly(self):
url = '/{}nodes/{}/files/github/?page[size]=100'.format(
API_BASE, self.project._id)
res = self.app.get(url, auth=self.user.auth)
- self.check_file_order(res)
+ self.check_file_order(res, 'name', key=int)
+
+ @responses.activate
+ def test_node_files_are_sorted_correctly_date_modified(self):
+ prepare_mock_wb_response(
+ node=self.project, provider='github',
+ files=[
+ {'name': '01', 'path': '/01', 'materialized': '/01', 'kind': 'file', 'modified': '2022-05-08T21:01:52.001Z'},
+ {'name': '02', 'path': '/02', 'materialized': '/02', 'kind': 'file', 'modified': '2021-05-08T21:01:52.020Z'},
+ {'name': '03', 'path': '/03', 'materialized': '/03', 'kind': 'file', 'modified': '2020-05-08T21:01:52.300Z'},
+ {'name': '04', 'path': '/04', 'materialized': '/04', 'kind': 'file', 'modified': '2023-05-08T21:01:52.020Z'},
+ {'name': '05', 'path': '/05', 'materialized': '/05', 'kind': 'file', 'modified': '2024-05-08T21:01:52.001Z'},
+ {'name': '06', 'path': '/06', 'materialized': '/06', 'kind': 'file', 'modified': '2025-05-08T21:01:52.000Z'},
+ {'name': '07', 'path': '/07/', 'materialized': '/07/', 'kind': 'folder'},
+ {'name': '01', 'path': '/01/', 'materialized': '/01/', 'kind': 'folder'},
+ ]
+ )
+ self.add_github()
+
+ url = f'/{API_BASE}nodes/{self.project._id}/files/github/?sort=date_modified'
+ res = self.app.get(url, auth=self.user.auth)
+ self.check_file_order(res, 'date_modified', key=parse_date)
+
+ url = f'/{API_BASE}nodes/{self.project._id}/files/github/?sort=-date_modified'
+ res = self.app.get(url, auth=self.user.auth)
+ self.check_file_order(res, 'date_modified', key=parse_date, ascending=True)
class TestNodeStorageProviderDetail(ApiTestCase):
@@ -678,6 +713,10 @@ def test_can_view_if_contributor(self):
res.json['data']['id'],
'{}:osfstorage'.format(self.private_project._id)
)
+ assert_equal(
+ res.json['data']['relationships']['target']['links']['related']['href'],
+ f'{settings.API_DOMAIN}v2/nodes/{self.private_project._id}/'
+ )
def test_can_view_if_public(self):
res = self.app.get(self.public_url)
@@ -686,7 +725,64 @@ def test_can_view_if_public(self):
res.json['data']['id'],
'{}:osfstorage'.format(self.public_project._id)
)
+ assert_equal(
+ res.json['data']['relationships']['target']['links']['related']['href'],
+ f'{settings.API_DOMAIN}v2/nodes/{self.public_project._id}/'
+ )
def test_cannot_view_if_private(self):
res = self.app.get(self.private_url, expect_errors=True)
assert_equal(res.status_code, 401)
+
+
+class TestShowAsUnviewed(ApiTestCase):
+
+ def setUp(self):
+ super().setUp()
+ self.user = AuthUserFactory()
+ self.node = ProjectFactory(is_public=True, creator=self.user)
+ self.test_file = api_utils.create_test_file(self.node, self.user, create_guid=False)
+ self.test_file.add_version(FileVersionFactory())
+ self.url = f'/{API_BASE}nodes/{self.node._id}/files/osfstorage/'
+
+ def test_show_as_unviewed__previously_seen(self):
+ FileVersionUserMetadata.objects.create(
+ user=self.user,
+ file_version=self.test_file.versions.order_by('created').first()
+ )
+
+ res = self.app.get(self.url, auth=self.user.auth)
+ assert res.json['data'][0]['attributes']['show_as_unviewed']
+
+ FileVersionUserMetadata.objects.create(
+ user=self.user,
+ file_version=self.test_file.versions.order_by('-created').first()
+ )
+
+ res = self.app.get(self.url, auth=self.user.auth)
+ assert not res.json['data'][0]['attributes']['show_as_unviewed']
+
+ def test_show_as_unviewed__not_previously_seen(self):
+ res = self.app.get(self.url, auth=self.user.auth)
+ assert not res.json['data'][0]['attributes']['show_as_unviewed']
+
+ def test_show_as_unviewed__different_user(self):
+ FileVersionUserMetadata.objects.create(
+ user=self.user,
+ file_version=self.test_file.versions.order_by('created').first()
+ )
+ file_viewer = AuthUserFactory()
+
+ res = self.app.get(self.url, auth=file_viewer.auth)
+ assert not res.json['data'][0]['attributes']['show_as_unviewed']
+
+ def test_show_as_unviewed__anonymous_user(self):
+ res = self.app.get(self.url)
+ assert not res.json['data'][0]['attributes']['show_as_unviewed']
+
+ def test_show_as_unviewed__no_versions(self):
+ # Most Non-OSFStorage providers don't have versions; make sure this still works
+ self.test_file.versions.all().delete()
+
+ res = self.app.get(self.url, auth=self.user.auth)
+ assert not res.json['data'][0]['attributes']['show_as_unviewed']
diff --git a/api_tests/preprints/views/test_preprint_detail.py b/api_tests/preprints/views/test_preprint_detail.py
index f5f1e86a916..39a5db2d822 100644
--- a/api_tests/preprints/views/test_preprint_detail.py
+++ b/api_tests/preprints/views/test_preprint_detail.py
@@ -1,6 +1,7 @@
import mock
import pytest
import datetime
+import responses
from django.utils import timezone
from rest_framework import exceptions
@@ -28,7 +29,8 @@
SubjectFactory,
PreprintProviderFactory,
)
-from website.settings import DOI_FORMAT
+from website.settings import DOI_FORMAT, CROSSREF_URL
+
def build_preprint_update_payload(
node_id, attributes=None, relationships=None,
@@ -303,7 +305,17 @@ def test_update_original_publication_date_to_none(self, app, preprint, url):
preprint.reload()
assert preprint.original_publication_date is None
+ @responses.activate
+ @mock.patch('osf.models.preprint.update_or_enqueue_on_preprint_updated', mock.Mock())
def test_update_preprint_permission_write_contrib(self, app, preprint, url):
+ responses.add(
+ responses.Response(
+ responses.POST,
+ CROSSREF_URL,
+ content_type='text/html;charset=ISO-8859-1',
+ status=200,
+ ),
+ )
write_contrib = AuthUserFactory()
preprint.add_contributor(write_contrib, WRITE, save=True)
@@ -524,21 +536,40 @@ def test_update_original_publication_date(self, app, user, preprint, url):
preprint.reload()
assert preprint.original_publication_date == date
+ @responses.activate
+ @mock.patch('osf.models.preprint.update_or_enqueue_on_preprint_updated', mock.Mock())
def test_update_article_doi(self, app, user, preprint, url):
- new_doi = '10.1234/ASDFASDF'
- assert preprint.article_doi != new_doi
- update_payload = build_preprint_update_payload(
- preprint._id, attributes={'doi': new_doi})
+ responses.add(
+ responses.Response(
+ responses.POST,
+ CROSSREF_URL,
+ content_type='text/html;charset=ISO-8859-1',
+ status=200,
+ ),
+ )
+ update_payload = build_preprint_update_payload(
+ preprint._id,
+ attributes={'doi': '10.1234/test'}
+ )
res = app.patch_json_api(url, update_payload, auth=user.auth)
assert res.status_code == 200
+ preprint_doi = DOI_FORMAT.format(prefix=preprint.provider.doi_prefix, guid=preprint._id)
+ update_payload = build_preprint_update_payload(
+ preprint._id,
+ attributes={'doi': preprint_doi}
+ )
+ res = app.patch_json_api(url, update_payload, auth=user.auth, expect_errors=True)
+ assert res.status_code == 400
+ error_data = res.json['errors']
+ assert ' is already associated with this preprint' in error_data[0]['detail']
+
preprint.reload()
- assert preprint.article_doi == new_doi
+ assert preprint.article_doi == '10.1234/test'
preprint_detail = app.get(url, auth=user.auth).json['data']
- assert preprint_detail['links']['doi'] == 'https://doi.org/{}'.format(
- new_doi)
+ assert preprint_detail['links']['doi'] == 'https://doi.org/10.1234/test'
def test_title_has_a_512_char_limit(self, app, user, preprint, url):
new_title = 'a' * 513
@@ -746,9 +777,18 @@ def test_update_published_does_not_make_node_public(
assert unpublished.node.is_public is False
assert unpublished.is_public
+ @responses.activate
@mock.patch('osf.models.preprint.update_or_enqueue_on_preprint_updated')
def test_update_preprint_task_called_on_api_update(
self, mock_on_preprint_updated, app, user, preprint, url):
+ responses.add(
+ responses.Response(
+ responses.POST,
+ CROSSREF_URL,
+ content_type='text/html;charset=ISO-8859-1',
+ status=200,
+ ),
+ )
update_doi_payload = build_preprint_update_payload(
preprint._id, attributes={'doi': '10.1234/ASDFASDF'})
diff --git a/api_tests/preprints/views/test_preprint_files_list.py b/api_tests/preprints/views/test_preprint_files_list.py
index 52e6d2cb715..47686fd3972 100644
--- a/api_tests/preprints/views/test_preprint_files_list.py
+++ b/api_tests/preprints/views/test_preprint_files_list.py
@@ -12,7 +12,7 @@
from osf.utils.permissions import WRITE
from osf.utils.workflows import DefaultStates
from addons.osfstorage.models import OsfStorageFile
-
+from website import settings
class TestPreprintProvidersList(ApiTestCase):
def setUp(self):
@@ -175,6 +175,10 @@ def test_returns_provider_data(self):
assert_equal(data['attributes']['preprint'], self.preprint._id)
assert_equal(data['attributes']['path'], '/')
assert_equal(data['attributes']['node'], None)
+ assert_equal(
+ data['relationships']['target']['links']['related']['href'],
+ f'{settings.API_DOMAIN}v2/preprints/{self.preprint._id}/'
+ )
def test_osfstorage_file_data_not_found(self):
res = self.app.get(
@@ -295,20 +299,20 @@ def test_deleted_preprint_files(self):
# Unauthenticated
res = self.app.get(self.url, expect_errors=True)
- assert res.status_code == 404
+ assert res.status_code == 410
# Noncontrib
res = self.app.get(self.url, auth=self.user_two.auth, expect_errors=True)
- assert res.status_code == 404
+ assert res.status_code == 410
# Write contributor
self.preprint.add_contributor(self.user_two, WRITE, save=True)
res = self.app.get(self.url, auth=self.user_two.auth, expect_errors=True)
- assert res.status_code == 404
+ assert res.status_code == 410
# Admin contrib
res = self.app.get(self.url, auth=self.user.auth, expect_errors=True)
- assert res.status_code == 404
+ assert res.status_code == 410
def test_withdrawn_preprint_files(self):
self.preprint.date_withdrawn = timezone.now()
@@ -360,7 +364,7 @@ def test_not_just_primary_file_returned(self):
data = res.json['data']
assert len(data) == 2
- assert data[0]['id'] == self.preprint.primary_file._id
+ assert set([item['id'] for item in data]) == {second_file._id, self.preprint.primary_file._id}
def test_nested_file_as_primary_file_is_returned(self):
# Primary file can be any file nested somewhere under the preprint's root folder.
diff --git a/api_tests/users/views/test_user_detail.py b/api_tests/users/views/test_user_detail.py
index 9ac8e85b735..dfaadf963ff 100644
--- a/api_tests/users/views/test_user_detail.py
+++ b/api_tests/users/views/test_user_detail.py
@@ -1299,7 +1299,6 @@ def test_user_put_profile_date_validate_ongoing_position(self, app, user_one, us
del request_payload['data']['attributes'][request_key][0]['endYear']
res = app.put_json_api(user_one_url, request_payload, auth=user_one.auth, expect_errors=True)
assert res.status_code == 400
- assert res.json['errors'][0]['detail'] == "For 'ongoing' the field value True is not valid under any of the given schemas"
def test_user_put_profile_date_validate_end_date(self, app, user_one, user_one_url, request_payload, request_key):
# End date is greater then start date
@@ -1325,7 +1324,6 @@ def test_user_put_profile_date_validate_start_date_no_end_date_not_ongoing(self,
res = app.put_json_api(user_one_url, start_dates_no_end_dates_payload, auth=user_one.auth, expect_errors=True)
user_one.reload()
assert res.status_code == 400
- assert res.json['errors'][0]['detail'] == "For 'ongoing' the field value True is not valid under any of the given schemas"
def test_user_put_profile_date_validate_end_date_no_start_date(self, app, user_one, user_attr, user_one_url, end_dates_no_start_dates_payload, request_key):
# End dates, but no start dates
diff --git a/conftest.py b/conftest.py
index ba56ee44d66..2b59ce93a21 100644
--- a/conftest.py
+++ b/conftest.py
@@ -191,12 +191,14 @@ def mock_datacite(registration):
data = ET.tostring(base_xml)
with mock.patch.object(website_settings, 'DATACITE_ENABLED', True):
- with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
- rsps.add(responses.GET, f'{website_settings.DATACITE_URL}/metadata', body=data, status=200)
- rsps.add(responses.POST, f'{website_settings.DATACITE_URL}/metadata', body=f'OK ({doi})', status=201)
- rsps.add(responses.POST, f'{website_settings.DATACITE_URL}/doi', body=f'OK ({doi})', status=201)
- rsps.add(responses.DELETE, f'{website_settings.DATACITE_URL}/metadata/{doi}', status=200)
- yield rsps
+ with mock.patch.object(website_settings, 'DATACITE_USERNAME', 'TestDataciteUsername'):
+ with mock.patch.object(website_settings, 'DATACITE_PASSWORD', 'TestDatacitePassword'):
+ with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
+ rsps.add(responses.GET, f'{website_settings.DATACITE_URL}/metadata', body=data, status=200)
+ rsps.add(responses.POST, f'{website_settings.DATACITE_URL}/metadata', body=f'OK ({doi})', status=201)
+ rsps.add(responses.POST, f'{website_settings.DATACITE_URL}/doi', body=f'OK ({doi})', status=201)
+ rsps.add(responses.DELETE, f'{website_settings.DATACITE_URL}/metadata/{doi}', status=200)
+ yield rsps
@pytest.fixture
diff --git a/framework/auth/cas.py b/framework/auth/cas.py
index 939a5e59821..183eddf3478 100644
--- a/framework/auth/cas.py
+++ b/framework/auth/cas.py
@@ -10,6 +10,7 @@
from framework.auth import authenticate, external_first_login_authenticate
from framework.auth.core import get_user, generate_verification_key
from framework.auth.utils import print_cas_log, LogLevel
+from framework.celery_tasks.handlers import enqueue_task
from framework.flask import redirect
from framework.exceptions import HTTPError
from website import settings
@@ -376,6 +377,9 @@ def get_user_from_cas_resp(cas_resp):
external_id=external_credential['id'])
# existing user found
if user:
+ # Enqueue an async celery task to affiliate the user with eligible institutions, if verified
+ from framework.auth.tasks import update_affiliation_for_orcid_sso_users
+ enqueue_task(update_affiliation_for_orcid_sso_users.s(user._id, external_credential['id']))
return user, external_credential, 'authenticate'
# user first time login through external identity provider
else:
diff --git a/framework/auth/tasks.py b/framework/auth/tasks.py
index 926b212b8d8..56faf3be871 100644
--- a/framework/auth/tasks.py
+++ b/framework/auth/tasks.py
@@ -1,11 +1,23 @@
from datetime import datetime
+import itertools
+import logging
+
+from lxml import etree
import pytz
+import requests
+
+from framework import sentry
+from framework.celery_tasks import app as celery_app
+from website.settings import (DATE_LAST_LOGIN_THROTTLE_DELTA, EXTERNAL_IDENTITY_PROFILE,
+ ORCID_PUBLIC_API_V3_URL, ORCID_PUBLIC_API_ACCESS_TOKEN,
+ ORCID_PUBLIC_API_REQUEST_TIMEOUT, ORCID_RECORD_ACCEPT_TYPE,
+ ORCID_RECORD_EDUCATION_PATH, ORCID_RECORD_EMPLOYMENT_PATH)
+
-from framework.celery_tasks import app
-from website.settings import DATE_LAST_LOGIN_THROTTLE_DELTA
+logger = logging.getLogger(__name__)
-@app.task
+@celery_app.task()
def update_user_from_activity(user_id, login_time, cas_login=False, updates=None):
from osf.models import OSFUser
if not updates:
@@ -27,3 +39,122 @@ def update_user_from_activity(user_id, login_time, cas_login=False, updates=None
should_save = True
if should_save:
user.save()
+
+
+@celery_app.task()
+def update_affiliation_for_orcid_sso_users(user_id, orcid_id):
+ """This is an asynchronous task that runs during CONFIRMED ORCiD SSO logins and makes eligible
+ institution affiliations.
+ """
+ from osf.models import OSFUser
+ user = OSFUser.load(user_id)
+ if not user or not verify_user_orcid_id(user, orcid_id):
+ # This should not happen as long as this task is called at the right place at the right time.
+ error_message = f'Invalid ORCiD ID [{orcid_id}] for [{user_id}]' if user else f'User [{user_id}] Not Found'
+ logger.error(error_message)
+ sentry.log_message(error_message)
+ return
+ institution = check_institution_affiliation(orcid_id)
+ if institution:
+ logger.info(f'Eligible institution affiliation has been found for ORCiD SSO user: '
+ f'institution=[{institution._id}], user=[{user_id}], orcid_id=[{orcid_id}]')
+ if not user.is_affiliated_with_institution(institution):
+ user.affiliated_institutions.add(institution)
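+
+# A minimal usage sketch: this task is meant to be enqueued (not called inline), mirroring the
+# call sites added in framework/auth/cas.py and framework/auth/views.py in this changeset;
+# `some_user` and `verified_orcid_id` are placeholder names:
+#
+#     from framework.celery_tasks.handlers import enqueue_task
+#     enqueue_task(update_affiliation_for_orcid_sso_users.s(some_user._id, verified_orcid_id))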
+
+
+def verify_user_orcid_id(user, orcid_id):
+ """Verify that the given ORCiD ID is verified for the given user.
+ """
+ provider = EXTERNAL_IDENTITY_PROFILE.get('OrcidProfile')
+ status = user.external_identity.get(provider, {}).get(orcid_id, None)
+ return status == 'VERIFIED'
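+
+# For reference, a sketch of the `external_identity` structure this check assumes; the provider
+# key comes from EXTERNAL_IDENTITY_PROFILE and the ORCiD iD below is a made-up placeholder:
+#
+#     user.external_identity == {
+#         'ORCID': {'0000-0001-2345-6789': 'VERIFIED'},  # non-'VERIFIED' statuses are rejected
+#     }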
+
+
+def check_institution_affiliation(orcid_id):
+ """Check user's public ORCiD record and return eligible institution affiliations.
+
+ Note: The current implementation only supports one affiliation (i.e. the loop returns once an
+ eligible affiliation is found, which improves performance). In the future, if multiple
+ institutions use this feature, we can update the loop easily.
+ """
+ from osf.models import Institution
+ from osf.models.institution import IntegrationType
+ employment_source_list = get_orcid_employment_sources(orcid_id)
+ education_source_list = get_orcid_education_sources(orcid_id)
+ via_orcid_institutions = Institution.objects.filter(
+ delegation_protocol=IntegrationType.AFFILIATION_VIA_ORCID.value,
+ is_deleted=False
+ )
+ # Check both employment and education records
+ for source in itertools.chain(employment_source_list, education_source_list):
+ # Check source against all "affiliation-via-orcid" institutions
+ for institution in via_orcid_institutions:
+ if source == institution.orcid_record_verified_source:
+ logger.debug(f'Institution has been found with matching source: '
+ f'institution=[{institution._id}], source=[{source}], orcid_id=[{orcid_id}]')
+ return institution
+ logger.debug(f'No institution with matching source has been found: orcid_id=[{orcid_id}]')
+ return None
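+
+# A rough sketch of the institution configuration this lookup expects (both fields are added to
+# the Institution model elsewhere in this changeset); the source string is a placeholder:
+#
+#     institution.delegation_protocol == IntegrationType.AFFILIATION_VIA_ORCID.value  # 'via-orcid'
+#     institution.orcid_record_verified_source == 'Example University Verified Employment'
+#
+# A user whose public ORCiD employment or education record lists that exact source name is
+# treated as affiliated with the institution.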
+
+
+def get_orcid_employment_sources(orcid_id):
+ """Retrieve employment records for the given ORCiD ID.
+ """
+ employment_data = orcid_public_api_make_request(ORCID_RECORD_EMPLOYMENT_PATH, orcid_id)
+ source_list = []
+ if employment_data is not None:
+ affiliation_groups = employment_data.findall('{http://www.orcid.org/ns/activities}affiliation-group')
+ for affiliation_group in affiliation_groups:
+ employment_summary = affiliation_group.find('{http://www.orcid.org/ns/employment}employment-summary')
+ source = employment_summary.find('{http://www.orcid.org/ns/common}source')
+ source_name = source.find('{http://www.orcid.org/ns/common}source-name')
+ source_list.append(source_name.text)
+ return source_list
+
+
+def get_orcid_education_sources(orcid_id):
+ """Retrieve education records for the given ORCiD ID.
+ """
+ education_data = orcid_public_api_make_request(ORCID_RECORD_EDUCATION_PATH, orcid_id)
+ source_list = []
+ if education_data is not None:
+ affiliation_groups = education_data.findall('{http://www.orcid.org/ns/activities}affiliation-group')
+ for affiliation_group in affiliation_groups:
+ education_summary = affiliation_group.find('{http://www.orcid.org/ns/education}education-summary')
+ source = education_summary.find('{http://www.orcid.org/ns/common}source')
+ source_name = source.find('{http://www.orcid.org/ns/common}source-name')
+ source_list.append(source_name.text)
+ return source_list
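+
+# Both helpers above assume ORCiD v3 activity XML shaped roughly as follows (namespace prefixes
+# abbreviated; the source name is a placeholder):
+#
+#     <activities:affiliation-group>
+#       <employment:employment-summary>   <!-- or education:education-summary -->
+#         <common:source>
+#           <common:source-name>Example University Verified Employment</common:source-name>
+#         </common:source>
+#       </employment:employment-summary>
+#     </activities:affiliation-group>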
+
+
+def orcid_public_api_make_request(path, orcid_id):
+ """Make the ORCiD public API request and returned a deserialized response.
+ """
+ request_url = ORCID_PUBLIC_API_V3_URL + orcid_id + path
+ headers = {
+ 'Accept': ORCID_RECORD_ACCEPT_TYPE,
+ 'Authorization': f'Bearer {ORCID_PUBLIC_API_ACCESS_TOKEN}',
+ }
+ try:
+ response = requests.get(request_url, headers=headers, timeout=ORCID_PUBLIC_API_REQUEST_TIMEOUT)
+ except Exception:
+ error_message = f'ORCiD public API request has encountered an exception: url=[{request_url}]'
+ logger.error(error_message)
+ sentry.log_message(error_message)
+ sentry.log_exception()
+ return None
+ if response.status_code != 200:
+ error_message = f'ORCiD public API request has failed: url=[{request_url}], ' \
+ f'status=[{response.status_code}], response = [{response.content}]'
+ logger.error(error_message)
+ sentry.log_message(error_message)
+ return None
+ try:
+ xml_data = etree.XML(response.content)
+ except Exception:
+ error_message = 'Failed to read and parse the ORCiD record response as XML'
+ logger.error(error_message)
+ sentry.log_message(error_message)
+ sentry.log_exception()
+ return None
+ return xml_data
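+
+# A minimal usage sketch (the ORCiD iD is a placeholder): the helper returns a parsed lxml
+# element on success and None on any request or parse failure, so callers must check for None:
+#
+#     record = orcid_public_api_make_request(ORCID_RECORD_EMPLOYMENT_PATH, '0000-0001-2345-6789')
+#     if record is not None:
+#         ...  # traverse with the namespace-qualified find()/findall() calls used above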
diff --git a/framework/auth/views.py b/framework/auth/views.py
index 0863c702fc1..fab98ec293c 100644
--- a/framework/auth/views.py
+++ b/framework/auth/views.py
@@ -20,6 +20,7 @@
from framework.auth.decorators import block_bing_preview, collect_auth, must_be_logged_in
from framework.auth.forms import ResendConfirmationForm, ForgotPasswordForm, ResetPasswordForm
from framework.auth.utils import ensure_external_identity_uniqueness, validate_recaptcha
+from framework.celery_tasks.handlers import enqueue_task
from framework.exceptions import HTTPError
from framework.flask import redirect # VOL-aware redirect
from framework.sessions.utils import remove_sessions_for_user, remove_session
@@ -672,6 +673,10 @@ def external_login_confirm_email_get(auth, uid, token):
can_change_preferences=False,
)
+ # Enqueue an async celery task to affiliate the user with eligible institutions, if verified
+ from framework.auth.tasks import update_affiliation_for_orcid_sso_users
+ enqueue_task(update_affiliation_for_orcid_sso_users.s(user._id, provider_id))
+
# redirect to CAS and authenticate the user with the verification key
return redirect(cas.get_login_url(
service_url,
diff --git a/osf/exceptions.py b/osf/exceptions.py
index f250be49ab3..8b83bf5d174 100644
--- a/osf/exceptions.py
+++ b/osf/exceptions.py
@@ -223,3 +223,21 @@ def __init__(self, response, invalid_responses=None, unsupported_keys=None):
)
super().__init__(error_message)
+
+
+class IdentifierHasReferencesError(OSFError):
+ pass
+
+
+class NoPIDError(OSFError):
+ pass
+
+
+class CannotFinalizeArtifactError(OSFError):
+
+ def __init__(self, artifact, incomplete_fields):
+ self.incomplete_fields = incomplete_fields
+ self.message = (
+ f'Could not set `finalized=True` for OutcomeArtifact with id [{artifact._id}]. '
+ f'The following required fields are not set: {incomplete_fields}'
+ )
diff --git a/osf/management/commands/update_institution_sso_email_domain.py b/osf/management/commands/update_institution_sso_email_domain.py
new file mode 100644
index 00000000000..7c14bfac2cd
--- /dev/null
+++ b/osf/management/commands/update_institution_sso_email_domain.py
@@ -0,0 +1,152 @@
+from enum import IntEnum
+import logging
+
+from django.core.management.base import BaseCommand
+
+from framework import sentry
+from framework.auth import get_user
+from osf.exceptions import BlockedEmailError, ValidationError
+from osf.models import Institution, OSFUser
+from osf.models.validators import validate_email
+
+logger = logging.getLogger(__name__)
+
+
+class UpdateResult(IntEnum):
+ """Defines 4 states of the email update outcome.
+ """
+ SUCCEEDED = 0 # The email has successfully been added
+ SKIPPED = 1 # The email already existed on the user before the script ran, or no eligible email was found
+ FAILED = 2 # The email failed to be added since it belongs to another account
+ ERRORED = 3 # The email failed to be added due to unexpected exceptions
+
+
+class Command(BaseCommand):
+ """Update emails of users from a given affiliated institution (when eligible).
+ """
+
+ def add_arguments(self, parser):
+ super(Command, self).add_arguments(parser)
+ parser.add_argument(
+ 'institution_id',
+ type=str,
+ help='the institution whose affiliated users\' eligible email is to be updated'
+ )
+ parser.add_argument(
+ 'src_domain',
+ type=str,
+ help='the email domain that is eligible for update'
+ )
+ parser.add_argument(
+ 'dst_domain',
+ type=str,
+ help='the email domain that is to be added'
+ )
+ parser.add_argument(
+ '--dry',
+ action='store_true',
+ dest='dry_run',
+ help='If true, iterate through eligible users and emails but don\'t add the email'
+ )
+
+ def handle(self, *args, **options):
+
+ institution_id = options.get('institution_id', '').lower()
+ src_domain = options.get('src_domain', '').lower()
+ dst_domain = options.get('dst_domain', '').lower()
+ dry_run = options.get('dry_run', True)
+
+ if dry_run:
+ logger.warning('This is a dry-run pass.')
+
+ # Verify the institution
+ institution = Institution.load(institution_id)
+ if not institution:
+ message = 'Error: invalid institution ID [{}]'.format(institution_id)
+ logger.error(message)
+ sentry.log_message(message)
+ return
+
+ # Find all users affiliated with the given institution
+ affiliated_users = OSFUser.objects.filter(affiliated_institutions___id=institution_id)
+
+ # Update email domain for each user
+ update_result = {
+ UpdateResult.SUCCEEDED.name: {},
+ UpdateResult.SKIPPED.name: {},
+ UpdateResult.FAILED.name: {},
+ UpdateResult.ERRORED.name: {},
+ }
+ for user in affiliated_users:
+ user_result = update_email_domain(user, src_domain, dst_domain, dry_run=dry_run)
+ if user_result.get(UpdateResult.SUCCEEDED.name):
+ update_result.get(UpdateResult.SUCCEEDED.name)[user._id] = user_result.get(UpdateResult.SUCCEEDED.name)
+ if user_result.get(UpdateResult.SKIPPED.name):
+ update_result.get(UpdateResult.SKIPPED.name)[user._id] = user_result.get(UpdateResult.SKIPPED.name)
+ if user_result.get(UpdateResult.FAILED.name):
+ update_result.get(UpdateResult.FAILED.name)[user._id] = user_result.get(UpdateResult.FAILED.name)
+ if user_result.get(UpdateResult.ERRORED.name):
+ update_result.get(UpdateResult.ERRORED.name)[user._id] = user_result.get(UpdateResult.ERRORED.name)
+
+ # Output update results to console
+ logger.info(f'{UpdateResult.SUCCEEDED.name} = {update_result.get(UpdateResult.SUCCEEDED.name)}')
+ logger.info(f'{UpdateResult.SKIPPED.name} = {update_result.get(UpdateResult.SKIPPED.name)}')
+ logger.warning(f'{UpdateResult.FAILED.name} = {update_result.get(UpdateResult.FAILED.name)}')
+ logger.error(f'{UpdateResult.ERRORED.name} = {update_result.get(UpdateResult.ERRORED.name)}')
+ if dry_run:
+ logger.warning(f'The above output is the result from a dry-run pass! '
+ f'{UpdateResult.SUCCEEDED.name} ones were not actually added!')
+
+
+def update_email_domain(user, src_domain, dst_domain, dry_run=True):
+ """For a given user, if it has an email `example@`, attempt to add `example@` to this
+ account. The action will be skipped if `example@` already exists on this user or no email is found
+ under `@`. The action will fail if `example@` already exists on another account. Other
+ unexpected exceptions will be considered as error.
+ """
+
+ # Find eligible emails to add
+ emails_to_add = []
+ user_result = {
+ UpdateResult.SUCCEEDED.name: [],
+ UpdateResult.SKIPPED.name: [],
+ UpdateResult.FAILED.name: [],
+ UpdateResult.ERRORED.name: [],
+ }
+ for email in user.emails.filter(address__endswith=f'@{src_domain}'):
+ email_parts = email.address.split('@')
+ name_part = email_parts[0].lower()
+ domain_part = email_parts[1].lower()
+ if domain_part == src_domain:
+ emails_to_add.append(f'{name_part}@{dst_domain}')
+ if not emails_to_add:
+ logger.warning(f'Action skipped due to no eligible email found for user [{user._id}]!')
+ return user_result
+ # Verify and attempt to add email; keep track of successes, failures and errors
+ for email in emails_to_add:
+ try:
+ validate_email(email)
+ except (ValidationError, BlockedEmailError):
+ logger.error(f'Email validation failed when adding [{email}] to user [{user._id}]!')
+ sentry.log_exception()
+ user_result.get(UpdateResult.ERRORED.name).append(email)
+ continue
+ duplicate_user = get_user(email=email)
+ if not duplicate_user:
+ try:
+ if not dry_run:
+ user.emails.create(address=email)
+ except Exception:
+ logger.error(f'An unexpected error occurred when adding email [{email}] to user [{user._id}]!')
+ sentry.log_exception()
+ user_result.get(UpdateResult.ERRORED.name).append(email)
+ else:
+ logger.info(f'Successfully added email [{email}] to user [{user._id}]!')
+ user_result.get(UpdateResult.SUCCEEDED.name).append(email)
+ elif duplicate_user == user:
+ logger.info(f'Action skipped since email [{email}] exists for the same user [{user._id}]!')
+ user_result.get(UpdateResult.SKIPPED.name).append(email)
+ else:
+ logger.warning(f'Action aborted since email [{email}] exists for a different user [{duplicate_user._id}]!')
+ user_result.get(UpdateResult.FAILED.name).append(email)
+ return user_result
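+
+# A minimal invocation sketch for this command (the institution id and domains are placeholders);
+# `--dry` iterates and reports without actually adding any emails:
+#
+#     python manage.py update_institution_sso_email_domain <institution_id> old-domain.edu new-domain.edu --dry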
diff --git a/osf/metadata/utils.py b/osf/metadata/utils.py
index 82e2311d137..dd82730c190 100644
--- a/osf/metadata/utils.py
+++ b/osf/metadata/utils.py
@@ -23,44 +23,74 @@
}
+def datacite_format_name_identifiers(user):
+ data = {
+ 'nameIdentifiers': [
+ {
+ 'nameIdentifier': f'{settings.DOMAIN}{user._id}/',
+ 'nameIdentifierScheme': 'URL',
+ }
+ ]
+ }
+ orcid = user.get_verified_external_id('ORCID', verified_only=True)
+ if orcid:
+ data['nameIdentifiers'].append({
+ 'nameIdentifier': orcid,
+ 'nameIdentifierScheme': 'ORCID',
+ 'schemeURI': 'http://orcid.org/'
+ })
+
+ return data
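+
+# For a user with a verified ORCiD iD, the helper above yields roughly the following (the GUID,
+# domain, and iD are placeholders):
+#
+#     {'nameIdentifiers': [
+#         {'nameIdentifier': 'https://osf.io/abcd1/', 'nameIdentifierScheme': 'URL'},
+#         {'nameIdentifier': '0000-0001-2345-6789', 'nameIdentifierScheme': 'ORCID',
+#          'schemeURI': 'http://orcid.org/'},
+#     ]}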
+
+
+def datacite_format_affiliations(user):
+ data = {'affiliation': []}
+ for affiliated_institution in user.affiliated_institutions.all():
+ data['affiliation'].append({
+ 'name': affiliated_institution.name,
+ })
+
+ if affiliated_institution.identifier_domain:
+ data['affiliation'].append({
+ 'name': affiliated_institution.name,
+ 'affiliationIdentifier': affiliated_institution.identifier_domain,
+ 'affiliationIdentifierScheme': 'URL',
+ })
+
+ if affiliated_institution.ror_uri:
+ data['affiliation'].append(
+ {
+ 'name': affiliated_institution.name,
+ 'affiliationIdentifier': affiliated_institution.ror_uri,
+ 'affiliationIdentifierScheme': 'ROR',
+ 'SchemeURI': 'https://ror.org/',
+ }
+ )
+
+ return data
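+
+# For a contributor affiliated with one institution that has both identifiers configured, the
+# helper above yields roughly the following (the institution name and URIs are placeholders):
+#
+#     {'affiliation': [
+#         {'name': 'Example University'},
+#         {'name': 'Example University',
+#          'affiliationIdentifier': 'https://example-university.edu/',
+#          'affiliationIdentifierScheme': 'URL'},
+#         {'name': 'Example University',
+#          'affiliationIdentifier': 'https://ror.org/00example0',
+#          'affiliationIdentifierScheme': 'ROR',
+#          'SchemeURI': 'https://ror.org/'},
+#     ]}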
+
+
def datacite_format_creators(creators):
""" Format a list of contributors to match the datacite schema
Schema found here: https://schema.datacite.org/meta/kernel-4.3/doc/DataCite-MetadataKernel_v4.3.pdf
- :param contributors_list: list of OSFUsers to format
+ :param creators: list of OSFUsers to format
:return: formatted json for datacite
"""
creators_json = []
for creator in creators:
- name_identifiers = [
- {
- 'nameIdentifier': f'{creator._id}/',
- 'nameIdentifierScheme': 'OSF',
- 'schemeURI': settings.DOMAIN
- }
- ]
- affiliated_institutions = [{
- 'affiliation': institution.name,
- 'affiliationIdentifier': 'OSF',
- 'schemeURI': settings.DOMAIN
- } for institution in creator.affiliated_institutions.all()]
-
- if creator.external_identity.get('ORCID'):
- verified = list(creator.external_identity['ORCID'].values())[0] == 'VERIFIED'
- if verified:
- name_identifiers.append({
- 'nameIdentifier': list(creator.external_identity['ORCID'].keys())[0],
- 'nameIdentifierScheme': 'ORCID',
- 'schemeURI': 'http://orcid.org/'
- })
-
- creators_json.append({
- 'nameIdentifiers': name_identifiers,
- 'affiliations': affiliated_institutions,
+ data = {}
+ if creator.affiliated_institutions.exists():
+ data.update(datacite_format_affiliations(creator))
+ data.update(datacite_format_name_identifiers(creator))
+ data.update({
+ 'nameType': 'Personal',
'creatorName': creator.fullname,
'familyName': creator.family_name,
- 'givenName': creator.given_name
+ 'givenName': creator.given_name,
+ 'name': creator.fullname
})
+ creators_json.append(data)
return creators_json
@@ -69,36 +99,24 @@ def datacite_format_contributors(contributors):
""" Format a list of contributors to match the datacite schema
Schema found here: https://schema.datacite.org/meta/kernel-4.3/doc/DataCite-MetadataKernel_v4.3.pdf
- :param contributors_list: list of OSFUsers to format
+ :param contributors: list of OSFUsers to format
:return: formatted json for datacite
"""
contributors_json = []
for contributor in contributors:
- name_identifiers = [
- {
- 'nameIdentifier': f'{contributor._id}/',
- 'nameIdentifierScheme': 'OSF',
- 'schemeURI': settings.DOMAIN
- }
- ]
-
- if contributor.external_identity.get('ORCID'):
- verified = list(contributor.external_identity['ORCID'].values())[0] == 'VERIFIED'
- if verified:
- name_identifiers.append({
- 'nameIdentifier': list(contributor.external_identity['ORCID'].keys())[0],
- 'nameIdentifierScheme': 'ORCID',
- 'schemeURI': 'http://orcid.org/'
- })
-
- contributors_json.append({
- 'nameIdentifiers': name_identifiers,
- 'contributorName': contributor.fullname,
+ data = {}
+ if contributor.affiliated_institutions.exists():
+ data.update(datacite_format_affiliations(contributor))
+ data.update(datacite_format_name_identifiers(contributor))
+ data.update({
+ 'nameType': 'Personal',
'contributorType': 'ProjectMember',
+ 'contributorName': contributor.fullname,
'familyName': contributor.family_name,
- 'givenName': contributor.given_name
+ 'givenName': contributor.given_name,
+ 'name': contributor.fullname,
})
-
+ contributors_json.append(data)
return contributors_json
diff --git a/osf/migrations/0243_auto_20220324_1105.py b/osf/migrations/0243_auto_20220324_1105.py
deleted file mode 100644
index ca0808c5a6e..00000000000
--- a/osf/migrations/0243_auto_20220324_1105.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by Django 1.11.28 on 2022-03-24 11:05
-from __future__ import unicode_literals
-
-from django.db import migrations, models
-from django.db.utils import ProgrammingError
-
-
-def remove_if_exist(apps, schema_editor):
- try:
- schema_editor.execute(
- schema_editor.sql_delete_table % {
- 'table': schema_editor.quote_name('quickfilesnode')
- }
- )
- except (KeyError, ProgrammingError):
- ## No quickfilesnode to delete all good
- pass
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('osf', '0242_auto_20220125_1604'),
- ]
-
- operations = [
- migrations.RunPython(remove_if_exist, migrations.RunPython.noop),
- migrations.AlterField(
- model_name='abstractnode',
- name='type',
- field=models.CharField(choices=[('osf.node', 'node'), ('osf.draftnode', 'draft node'), ('osf.registration', 'registration')], db_index=True, max_length=255),
- ),
- ]
diff --git a/osf/migrations/0244_auto_20220517_1718.py b/osf/migrations/0244_auto_20220517_1718.py
new file mode 100644
index 00000000000..bdcbbe0de09
--- /dev/null
+++ b/osf/migrations/0244_auto_20220517_1718.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.29 on 2022-05-17 17:18
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('osf', '0243_auto_20211025_1353'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='institution',
+ name='identifier_domain',
+ field=models.URLField(blank=True, help_text='The full domain for this institution that will appear in DOI metadata.', max_length=500, null=True),
+ ),
+ migrations.AddField(
+ model_name='institution',
+ name='ror_uri',
+ field=models.URLField(blank=True, help_text='The full ROR URI for this institution.', max_length=500, null=True),
+ ),
+ ]
diff --git a/osf/migrations/0245_auto_20220621_1950.py b/osf/migrations/0245_auto_20220621_1950.py
new file mode 100644
index 00000000000..0cfc0b6d7da
--- /dev/null
+++ b/osf/migrations/0245_auto_20220621_1950.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.29 on 2022-06-21 19:50
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('osf', '0244_auto_20220517_1718'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='institution',
+ name='orcid_record_verified_source',
+ field=models.CharField(blank=True, default='', max_length=255),
+ ),
+ migrations.AlterField(
+ model_name='institution',
+ name='delegation_protocol',
+ field=models.CharField(blank=True, choices=[('saml-shib', 'SAML_SHIBBOLETH'), ('cas-pac4j', 'CAS_PAC4J'), ('oauth-pac4j', 'OAUTH_PAC4J'), ('via-orcid', 'AFFILIATION_VIA_ORCID'), ('', 'NONE')], default='', max_length=15),
+ ),
+ ]
diff --git a/osf/migrations/0246_add_outcomes_and_artifacts.py b/osf/migrations/0246_add_outcomes_and_artifacts.py
new file mode 100644
index 00000000000..586fcbff75b
--- /dev/null
+++ b/osf/migrations/0246_add_outcomes_and_artifacts.py
@@ -0,0 +1,83 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.29 on 2022-07-14 18:50
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+import django.db.models.deletion
+import django_extensions.db.fields
+import osf.models.base
+import osf.models.validators
+import osf.utils.outcomes
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('osf', '0245_auto_20220621_1950'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='Outcome',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
+ ('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
+ ('_id', models.CharField(db_index=True, default=osf.models.base.generate_object_id, max_length=24, unique=True)),
+ ('title', models.TextField(validators=[osf.models.validators.validate_title])),
+ ('description', models.TextField(blank=True, default='')),
+ ('category', models.CharField(blank=True, choices=[('analysis', 'Analysis'), ('communication', 'Communication'), ('data', 'Data'), ('hypothesis', 'Hypothesis'), ('instrumentation', 'Instrumentation'), ('methods and measures', 'Methods and Measures'), ('procedure', 'Procedure'), ('project', 'Project'), ('software', 'Software'), ('other', 'Other'), ('', 'Uncategorized')], default='', max_length=255)),
+ ('affiliated_institutions', models.ManyToManyField(related_name='outcomes', to='osf.Institution')),
+ ],
+ options={
+ 'abstract': False,
+ },
+ bases=(models.Model, osf.models.base.QuerySetExplainMixin),
+ ),
+ migrations.CreateModel(
+ name='OutcomeArtifact',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
+ ('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
+ ('_id', models.CharField(db_index=True, default=osf.models.base.generate_object_id, max_length=24, unique=True)),
+ ('artifact_type', models.IntegerField(choices=[(0, 'UNDEFINED'), (1, 'DATA'), (11, 'CODE'), (21, 'MATERIALS'), (31, 'PAPERS'), (41, 'SUPPLEMENTS'), (1001, 'PRIMARY')], default=osf.utils.outcomes.ArtifactTypes(0))),
+ ('title', models.TextField()),
+ ('description', models.TextField()),
+ ('identifier', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='artifact_metadata', to='osf.Identifier')),
+ ('outcome', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='artifact_metadata', to='osf.Outcome')),
+ ],
+ options={
+ 'ordering': ['artifact_type', 'title'],
+ },
+ bases=(models.Model, osf.models.base.QuerySetExplainMixin),
+ ),
+ migrations.AddField(
+ model_name='outcome',
+ name='artifacts',
+ field=models.ManyToManyField(through='osf.OutcomeArtifact', to='osf.Identifier'),
+ ),
+ migrations.AddField(
+ model_name='outcome',
+ name='node_license',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='outcomes', to='osf.NodeLicenseRecord'),
+ ),
+ migrations.AddField(
+ model_name='outcome',
+ name='subjects',
+ field=models.ManyToManyField(blank=True, related_name='outcomes', to='osf.Subject'),
+ ),
+ migrations.AddField(
+ model_name='outcome',
+ name='tags',
+ field=models.ManyToManyField(related_name='outcome_tagged', to='osf.Tag'),
+ ),
+ migrations.AddIndex(
+ model_name='outcomeartifact',
+ index=models.Index(fields=['outcome', 'artifact_type'], name='osf_outcome_outcome_a62f5c_idx'),
+ ),
+ migrations.AlterUniqueTogether(
+ name='outcomeartifact',
+ unique_together=set([('outcome', 'identifier', 'artifact_type')]),
+ ),
+ ]
diff --git a/osf/migrations/0247_artifact_finalized_and_deleted.py b/osf/migrations/0247_artifact_finalized_and_deleted.py
new file mode 100644
index 00000000000..24f4a19fe26
--- /dev/null
+++ b/osf/migrations/0247_artifact_finalized_and_deleted.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.29 on 2022-07-25 15:39
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+import osf.utils.fields
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('osf', '0246_add_outcomes_and_artifacts'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='outcomeartifact',
+ name='deleted',
+ field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True),
+ ),
+ migrations.AddField(
+ model_name='outcomeartifact',
+ name='finalized',
+ field=models.BooleanField(default=False),
+ ),
+ migrations.AlterField(
+ model_name='outcomeartifact',
+ name='description',
+ field=models.TextField(blank=True),
+ ),
+ migrations.AlterField(
+ model_name='outcomeartifact',
+ name='title',
+ field=models.TextField(blank=True),
+ ),
+ migrations.AlterField(
+ model_name='outcomeartifact',
+ name='artifact_type',
+ field=models.IntegerField(choices=[(0, 'UNDEFINED'), (1, 'DATA'), (11, 'ANALYTIC_CODE'), (21, 'MATERIALS'), (31, 'PAPERS'), (41, 'SUPPLEMENTS'), (1001, 'PRIMARY')], default=osf.utils.outcomes.ArtifactTypes(0)),
+ ),
+ ]
diff --git a/osf/migrations/0248_artifact_tweaks.py b/osf/migrations/0248_artifact_tweaks.py
new file mode 100644
index 00000000000..95d8aed2029
--- /dev/null
+++ b/osf/migrations/0248_artifact_tweaks.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.29 on 2022-08-01 19:37
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('osf', '0247_artifact_finalized_and_deleted'),
+ ]
+
+ operations = [
+ migrations.RemoveIndex(
+ model_name='outcomeartifact',
+ name='osf_outcome_outcome_a62f5c_idx',
+ ),
+ migrations.AlterField(
+ model_name='outcomeartifact',
+ name='identifier',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='artifact_metadata', to='osf.Identifier'),
+ ),
+ migrations.AlterUniqueTogether(
+ name='outcomeartifact',
+ unique_together=set([]),
+ ),
+ migrations.AddIndex(
+ model_name='outcomeartifact',
+ index=models.Index(fields=['artifact_type', 'outcome'], name='osf_outcome_artifac_5eb92d_idx'),
+ ),
+ ]
diff --git a/osf/migrations/0249_schema_response_justification_to_text_field.py b/osf/migrations/0249_schema_response_justification_to_text_field.py
new file mode 100644
index 00000000000..846dd5bd9ef
--- /dev/null
+++ b/osf/migrations/0249_schema_response_justification_to_text_field.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.29 on 2022-08-03 13:47
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('osf', '0248_artifact_tweaks'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='schemaresponse',
+ name='revision_justification',
+ field=models.TextField(blank=True, null=True),
+ ),
+ ]
diff --git a/osf/migrations/__init__.py b/osf/migrations/__init__.py
index e63ebb9d990..9b856bd1306 100644
--- a/osf/migrations/__init__.py
+++ b/osf/migrations/__init__.py
@@ -52,6 +52,7 @@ def get_admin_write_permissions():
'delete_preprintprovider',
'change_subject',
'change_maintenancestate',
+ 'change_registrationschema',
'delete_maintenancestate',
'change_scheduledbanner',
'delete_scheduledbanner',
diff --git a/osf/models/__init__.py b/osf/models/__init__.py
index dc9c64dc1db..bccdec097a0 100644
--- a/osf/models/__init__.py
+++ b/osf/models/__init__.py
@@ -54,3 +54,5 @@
from osf.models.schema_response_block import SchemaResponseBlock # noqa
from osf.models.registration_bulk_upload_job import RegistrationBulkUploadJob # noqa
from osf.models.registration_bulk_upload_row import RegistrationBulkUploadRow # noqa
+from osf.models.outcomes import Outcome # noqa
+from osf.models.outcome_artifacts import OutcomeArtifact # noqa
diff --git a/osf/models/files.py b/osf/models/files.py
index 41d7f2a8185..59b8be9b818 100644
--- a/osf/models/files.py
+++ b/osf/models/files.py
@@ -191,10 +191,14 @@ def create(cls, **kwargs):
return cls(**kwargs)
@classmethod
- def get_or_create(cls, target, path):
+ def get_or_create(cls, target, path, **unused_query_params):
content_type = ContentType.objects.get_for_model(target)
try:
- obj = cls.objects.get(target_object_id=target.id, target_content_type=content_type, _path='/' + path.lstrip('/'))
+ obj = cls.objects.get(
+ target_object_id=target.id,
+ target_content_type=content_type,
+ _path='/' + path.lstrip('/'),
+ )
except cls.DoesNotExist:
obj = cls(target_object_id=target.id, target_content_type=content_type, _path='/' + path.lstrip('/'))
return obj
diff --git a/osf/models/identifiers.py b/osf/models/identifiers.py
index 5d871a74c0d..a98b53a0a02 100644
--- a/osf/models/identifiers.py
+++ b/osf/models/identifiers.py
@@ -2,6 +2,8 @@
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils import timezone
+
+from osf.exceptions import IdentifierHasReferencesError
from osf.models.base import BaseModel, ObjectIDMixin
from osf.utils.fields import NonNaiveDateTimeField
@@ -28,6 +30,12 @@ def remove(self, save=True):
if save:
self.save()
+ def delete(self):
+ '''Used to delete an orphaned Identifier (distinct from setting `deleted`)'''
+ if self.object_id or self.artifact_metadata.filter(deleted__isnull=True).exists():
+ raise IdentifierHasReferencesError
+ super().delete()
+
class IdentifierMixin(models.Model):
"""Model mixin that adds methods for getting and setting Identifier objects
diff --git a/osf/models/institution.py b/osf/models/institution.py
index 74bc6e1d0ff..f03dd445718 100644
--- a/osf/models/institution.py
+++ b/osf/models/institution.py
@@ -1,5 +1,6 @@
-import logging
+from enum import Enum
from future.moves.urllib.parse import urljoin
+import logging
from dirtyfields import DirtyFieldsMixin
@@ -22,6 +23,24 @@
logger = logging.getLogger(__name__)
+class IntegrationType(Enum):
+ """Defines 5 SSO types for OSF institution integration.
+ """
+
+ SAML_SHIBBOLETH = 'saml-shib' # SSO via SAML (Shibboleth impl) where CAS serves as the SP and institutions as IdP
+ CAS_PAC4J = 'cas-pac4j' # SSO via CAS (pac4j impl) where CAS serves as the client and institution as server
+ OAUTH_PAC4J = 'oauth-pac4j' # SSO via OAuth (pac4j impl) where CAS serves as the client and institution as server
+ AFFILIATION_VIA_ORCID = 'via-orcid' # Using ORCiD SSO for sign in; using ORCiD public API for affiliation
+ NONE = '' # Institution affiliation is done via email domain whitelist w/o SSO
+
+
+class SharedSsoAffiliationFilterCriteriaAction(Enum):
+ """Defines 2 criteria that determines if the secondary institution is eligible for affiliation via shared SSO.
+ """
+ EQUALS_TO = 'equals_to' # Type 1: SSO releases a single-value attribute with an exact value that matches
+ CONTAINS = 'contains' # Type 2: SSO releases a multi-value attribute, of which one value matches
+
+
class InstitutionManager(models.Manager):
def get_queryset(self):
@@ -51,18 +70,16 @@ class Institution(DirtyFieldsMixin, Loggable, base.ObjectIDMixin, base.BaseModel
banner_name = models.CharField(max_length=255, blank=True, null=True)
logo_name = models.CharField(max_length=255, blank=True, null=True)
- # The protocol which is used to delegate authentication.
- # Currently, we have `CAS`, `SAML`, `OAuth` available.
- # For `SAML`, we use Shibboleth.
- # For `CAS` and `OAuth`, we use pac4j.
- # Only institutions with a valid delegation protocol show up on the institution login page.
- DELEGATION_PROTOCOL_CHOICES = (
- ('cas-pac4j', 'CAS by pac4j'),
- ('oauth-pac4j', 'OAuth by pac4j'),
- ('saml-shib', 'SAML by Shibboleth'),
- ('', 'No Delegation Protocol'),
+ # Institution integration type
+ delegation_protocol = models.CharField(
+ choices=[(type.value, type.name) for type in IntegrationType],
+ max_length=15,
+ blank=True,
+ default=''
)
- delegation_protocol = models.CharField(max_length=15, choices=DELEGATION_PROTOCOL_CHOICES, blank=True, default='')
+
+ # Verified employment/education affiliation source for `via-orcid` institutions
+ orcid_record_verified_source = models.CharField(max_length=255, blank=True, default='')
# login_url and logout_url can be null or empty
login_url = models.URLField(null=True, blank=True)
@@ -80,6 +97,18 @@ class Institution(DirtyFieldsMixin, Loggable, base.ObjectIDMixin, base.BaseModel
is_deleted = models.BooleanField(default=False, db_index=True)
deleted = NonNaiveDateTimeField(null=True, blank=True)
deactivated = NonNaiveDateTimeField(null=True, blank=True)
+ ror_uri = models.URLField(
+ max_length=500,
+ null=True,
+ blank=True,
+ help_text='The full ROR URI for this institution.'
+ )
+ identifier_domain = models.URLField(
+ max_length=500,
+ null=True,
+ blank=True,
+ help_text='The full domain for this institution that will appear in DOI metadata.'
+ )
class Meta:
# custom permissions for use in the OSF Admin App
diff --git a/osf/models/mixins.py b/osf/models/mixins.py
index d4c89b9e027..0e826ff7adb 100644
--- a/osf/models/mixins.py
+++ b/osf/models/mixins.py
@@ -2247,7 +2247,7 @@ def stage_m2m_values(self, fieldname, resource, alternative_resource=None):
else:
return []
- def copy_editable_fields(self, resource, auth=None, alternative_resource=None, save=True):
+ def copy_editable_fields(self, resource, auth=None, alternative_resource=None, include_contributors=True, save=True):
"""
Copy various editable fields from the 'resource' object to the current object.
Includes, title, description, category, contributors, node_license, tags, subjects, and affiliated_institutions
@@ -2261,11 +2261,12 @@ def copy_editable_fields(self, resource, auth=None, alternative_resource=None, s
self.set_editable_attribute('category', resource, alternative_resource)
self.set_editable_attribute('node_license', resource, alternative_resource)
- # Contributors will always come from "resource", as contributor constraints
- # will require contributors to be present on the resource
- self.copy_contributors_from(resource)
- # Copy unclaimed records for unregistered users
- self.copy_unclaimed_records(resource)
+ if include_contributors:
+ # Contributors will always come from "resource", as contributor constraints
+ # will require contributors to be present on the resource
+ self.copy_contributors_from(resource)
+ # Copy unclaimed records for unregistered users
+ self.copy_unclaimed_records(resource)
self.tags.add(*self.stage_m2m_values('all_tags', resource, alternative_resource))
self.subjects.add(*self.stage_m2m_values('subjects', resource, alternative_resource))
diff --git a/osf/models/nodelog.py b/osf/models/nodelog.py
index c4cbb626a98..0d94653b3f9 100644
--- a/osf/models/nodelog.py
+++ b/osf/models/nodelog.py
@@ -141,6 +141,12 @@ class NodeLog(ObjectIDMixin, BaseModel):
FLAG_SPAM = 'flag_spam'
CONFIRM_SPAM = 'confirm_spam'
+ MIGRATED_QUICK_FILES = 'migrated_quickfiles'
+
+ RESOURCE_ADDED = 'resource_identifier_added'
+ RESOURCE_UPDATED = 'resource_identifier_updated'
+ RESOURCE_REMOVED = 'resource_identifier_removed'
+
actions = ([CHECKED_IN, CHECKED_OUT, FILE_TAG_REMOVED, FILE_TAG_ADDED, CREATED_FROM, PROJECT_CREATED,
PROJECT_REGISTERED, PROJECT_DELETED, NODE_CREATED, NODE_FORKED, NODE_REMOVED,
NODE_ACCESS_REQUESTS_ENABLED, NODE_ACCESS_REQUESTS_DISABLED,
@@ -159,7 +165,8 @@ class NodeLog(ObjectIDMixin, BaseModel):
PREREG_REGISTRATION_INITIATED, PROJECT_CREATED_FROM_DRAFT_REG,
GROUP_ADDED, GROUP_UPDATED, GROUP_REMOVED,
AFFILIATED_INSTITUTION_ADDED, AFFILIATED_INSTITUTION_REMOVED, PREPRINT_INITIATED,
- PREPRINT_FILE_UPDATED, PREPRINT_LICENSE_UPDATED, VIEW_ONLY_LINK_ADDED, VIEW_ONLY_LINK_REMOVED] + list(sum([
+ PREPRINT_FILE_UPDATED, PREPRINT_LICENSE_UPDATED, VIEW_ONLY_LINK_ADDED, VIEW_ONLY_LINK_REMOVED,
+ RESOURCE_ADDED, RESOURCE_UPDATED, RESOURCE_REMOVED] + list(sum([
config.actions for config in apps.get_app_configs() if config.name.startswith('addons.')
], tuple())))
action_choices = [(action, action.upper()) for action in actions]
diff --git a/osf/models/outcome_artifacts.py b/osf/models/outcome_artifacts.py
new file mode 100644
index 00000000000..a1cac753d4f
--- /dev/null
+++ b/osf/models/outcome_artifacts.py
@@ -0,0 +1,194 @@
+from django.db import models
+from django.utils import timezone
+
+from osf.exceptions import (
+ CannotFinalizeArtifactError,
+ IdentifierHasReferencesError,
+ NoPIDError
+)
+from osf.models.base import BaseModel, ObjectIDMixin
+from osf.models.identifiers import Identifier
+from osf.utils import outcomes as outcome_utils
+from osf.utils.fields import NonNaiveDateTimeField
+
+
+'''
+This module defines the OutcomeArtifact model and its custom manager.
+
+OutcomeArtifacts are a through-table, providing some additional metadata on the relationship
+between an Outcome and an external Identifier that stores materials or provides context
+for the research effort described by the Outcome.
+'''
+
+
+ArtifactTypes = outcome_utils.ArtifactTypes
+OutcomeActions = outcome_utils.OutcomeActions
+
+
+class ArtifactManager(models.Manager):
+
+ def get_queryset(self):
+ '''Overrides default `get_queryset` behavior to add custom logic.
+
+ Automatically annotates the `pid` from any linked identifier and the
+ GUID of the primary resource for the parent artifact.
+
+ Automatically filters out deleted entries
+ '''
+ base_queryset = super().get_queryset().select_related('identifier')
+ return base_queryset.annotate(
+ pid=models.F('identifier__value'),
+ primary_resource_guid=outcome_utils.make_primary_resource_guid_annotation(base_queryset)
+ )
+
+ def for_registration(self, registration, identifier_type='doi'):
+ '''Retrieves all OutcomeArtifacts sharing an Outcome, given the Primary Registration.'''
+ registration_identifier = registration.get_identifier(identifier_type)
+ artifact_qs = self.get_queryset()
+ return artifact_qs.annotate(
+ primary_outcome=models.Subquery(
+ artifact_qs.filter(
+ identifier=registration_identifier,
+ artifact_type=ArtifactTypes.PRIMARY
+ ).values('outcome_id')[:1],
+ output_field=models.IntegerField()
+ )
+ ).filter(
+ outcome_id=models.F('primary_outcome')
+ ).exclude(
+ identifier=registration_identifier
+ )
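+
+ # A minimal usage sketch (assumes the registration already has a DOI Identifier); the queryset
+ # carries the `pid` and `primary_resource_guid` annotations added in get_queryset():
+ #
+ #     for artifact in OutcomeArtifact.objects.for_registration(some_registration):
+ #         print(artifact.pid, artifact.artifact_type, artifact.primary_resource_guid)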
+
+
+class OutcomeArtifact(ObjectIDMixin, BaseModel):
+ '''OutcomeArtifact is a through table that connects an Outcome with Identifiers
+ while providing some additional, useful metadata'''
+
+ # The following fields are inherited from ObjectIdMixin
+ # _id (CharField)
+
+ # The following fields are inherited from BaseModel
+ # created (DateTimeField)
+ # modified (DateTimeField)
+
+ outcome = models.ForeignKey(
+ 'osf.outcome',
+ on_delete=models.CASCADE,
+ related_name='artifact_metadata'
+ )
+ identifier = models.ForeignKey(
+ 'osf.identifier',
+ null=True,
+ blank=True,
+ on_delete=models.SET_NULL,
+ related_name='artifact_metadata'
+ )
+
+ artifact_type = models.IntegerField(
+ null=False,
+ choices=ArtifactTypes.choices(),
+ default=ArtifactTypes.UNDEFINED,
+ )
+
+ title = models.TextField(null=False, blank=True)
+ description = models.TextField(null=False, blank=True)
+ finalized = models.BooleanField(default=False)
+ deleted = NonNaiveDateTimeField(null=True, blank=True)
+
+ objects = ArtifactManager()
+
+ class Meta:
+ indexes = [
+ models.Index(fields=['artifact_type', 'outcome'])
+ ]
+ ordering = ['artifact_type', 'title']
+
+ def update_identifier(self, new_pid_value, pid_type='doi', api_request=None):
+ '''Changes the linked Identifier to one matching the new pid_value and handles callbacks.
+
+ If `finalized` is True, will also log the change on the parent Outcome if invoked via API.
+ Will attempt to delete the previous identifier to avoid orphaned entries.
+
+ Parameters:
+ new_pid_value: The string value of the new PID
+ pid_type (str): The string "type" of the new PID (for now, only "doi" is supported)
+ api_request: The api_request data from the API call that initiated the change.
+ '''
+ if not new_pid_value:
+ raise NoPIDError(f'Cannot assign an empty PID to OutcomeArtifact with ID {self._id}')
+
+ previous_identifier = self.identifier
+ self.identifier, _ = Identifier.objects.get_or_create(
+ value=new_pid_value, category=pid_type
+ )
+ self.save()
+ if previous_identifier:
+ try:
+ previous_identifier.delete()
+ except IdentifierHasReferencesError:
+ pass
+
+ if self.finalized and api_request:
+ self.outcome.log_artifact_change(
+ action=OutcomeActions.UPDATE,
+ artifact=self,
+ api_request=api_request,
+ obsolete_identifier=previous_identifier.value if previous_identifier else None,
+ new_identifier=new_pid_value
+ )
+
+ def finalize(self, api_request=None):
+ '''Sets `finalized` to True and handles callbacks.
+
+ Logs the change on the parent Outcome if invoked via the API.
+
+ Parameters:
+ api_request: The api_request data from the API call that initiated the change.
+ '''
+ incomplete_fields = []
+ if not (self.identifier and self.identifier.value):
+ incomplete_fields.append('identifier__value')
+ if not self.artifact_type:
+ incomplete_fields.append('artifact_type')
+ if incomplete_fields:
+ raise CannotFinalizeArtifactError(self, incomplete_fields)
+
+ self.finalized = True
+ self.save()
+
+ if api_request:
+ self.outcome.log_artifact_change(
+ action=OutcomeActions.ADD,
+ artifact=self,
+ api_request=api_request,
+ new_identifier=self.identifier.value
+ )
+
+ def delete(self, api_request=None, **kwargs):
+ '''Intercepts `delete` behavior on the model instance and handles callbacks.
+
+ Deletes from the database if not `finalized`; otherwise sets the `deleted` timestamp.
+ Logs the change on the parent Outcome if invoked via the API.
+ Attempts to delete the linked Identifier to avoid orphaned entries.
+
+ Parameters:
+ api_request: The api_request data from the API call that initiated the change.
+ '''
+ identifier = self.identifier
+ if self.finalized:
+ if api_request:
+ self.outcome.log_artifact_change(
+ action=OutcomeActions.REMOVE,
+ artifact=self,
+ api_request=api_request,
+ obsolete_identifier=identifier.value
+ )
+ self.deleted = timezone.now()
+ self.save()
+ else:
+ super().delete(**kwargs)
+
+ try:
+ identifier.delete()
+ except IdentifierHasReferencesError:
+ pass
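+
+# A rough lifecycle sketch (the outcome variable and PID value are placeholders); `api_request`
+# is omitted here, so no log entries are written on the parent Outcome:
+#
+#     artifact = some_outcome.artifact_metadata.create(artifact_type=ArtifactTypes.DATA)
+#     artifact.update_identifier('10.70102/FK2osf.io/placeholder')
+#     artifact.finalize()  # raises CannotFinalizeArtifactError if the PID or type is missing
+#     artifact.delete()    # finalized, so this stamps `deleted` instead of removing the row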
diff --git a/osf/models/outcomes.py b/osf/models/outcomes.py
new file mode 100644
index 00000000000..b620b630841
--- /dev/null
+++ b/osf/models/outcomes.py
@@ -0,0 +1,92 @@
+from django.db import models
+from django.utils.functional import cached_property
+
+from osf.exceptions import NoPIDError
+from osf.models.base import BaseModel, ObjectIDMixin
+from osf.models.mixins import EditableFieldsMixin
+from osf.models.nodelog import NodeLog
+from osf.utils.outcomes import ArtifactTypes, OutcomeActions
+
+'''
+This module defines the Outcome model and its custom manager.
+
+Outcomes serve as a way to collect metadata about a research effort and to aggregate Identifiers
+used to share data or provide context for that research effort, along with some additional metadata
+stored in the OutcomeArtifact through table.
+'''
+
+NODE_LOGS_FOR_OUTCOME_ACTION = {
+ OutcomeActions.ADD: NodeLog.RESOURCE_ADDED,
+ OutcomeActions.UPDATE: NodeLog.RESOURCE_UPDATED,
+ OutcomeActions.REMOVE: NodeLog.RESOURCE_REMOVED,
+}
+
+
+class OutcomeManager(models.Manager):
+
+ def for_registration(self, registration, identifier_type='doi', create=False, **kwargs):
+ registration_identifier = registration.get_identifier(category=identifier_type)
+ if not registration_identifier:
+ raise NoPIDError(f'Provided registration has no PID of type {identifier_type}')
+
+ primary_artifact = registration_identifier.artifact_metadata.filter(
+ artifact_type=ArtifactTypes.PRIMARY.value
+ ).order_by('-created').first()
+ if primary_artifact:
+ return primary_artifact.outcome
+ elif not create:
+ return None
+
+ new_outcome = self.create(**kwargs)
+ new_outcome.copy_editable_fields(registration, include_contributors=False)
+ new_outcome.artifact_metadata.create(
+ identifier=registration_identifier,
+ artifact_type=ArtifactTypes.PRIMARY,
+ finalized=True,
+ )
+ return new_outcome
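+
+ # A minimal usage sketch (assumes `some_registration` already has a DOI identifier): with
+ # create=True a new Outcome is built, its editable fields are copied from the registration, and
+ # a finalized PRIMARY artifact pointing at the registration's DOI is attached:
+ #
+ #     outcome = Outcome.objects.for_registration(some_registration, create=True)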
+
+
+class Outcome(ObjectIDMixin, EditableFieldsMixin, BaseModel):
+ # The following fields are inherited from ObjectIdMixin
+ # _id (CharField)
+
+ # The following fields are inherited from BaseModel
+ # created (DateTimeField)
+ # modified (DateTimeField)
+
+ # The following fields inherited from EditableFieldsMixin:
+ # title (TextField)
+ # description (TextField)
+ # category (CharField)
+ # tags (Tags, M2M)
+ # subjects (Subjects, M2M)
+
+ # These override the fields inherited from EditableFieldsMixin
+ # This is required to avoid collisions with the related_name
+ affiliated_institutions = models.ManyToManyField('Institution', related_name='outcomes')
+ node_license = models.ForeignKey(
+ 'NodeLicenseRecord',
+ related_name='outcomes',
+ on_delete=models.SET_NULL,
+ null=True,
+ blank=True
+ )
+
+ artifacts = models.ManyToManyField('osf.Identifier', through='osf.OutcomeArtifact')
+
+ objects = OutcomeManager()
+
+ @cached_property
+ def primary_osf_resource(self):
+ return self.artifact_metadata.get(artifact_type=ArtifactTypes.PRIMARY).identifier.referent
+
+ def log_artifact_change(self, action, artifact, api_request, **log_params):
+ nodelog_action = NODE_LOGS_FOR_OUTCOME_ACTION[action]
+ nodelog_params = {'artifact_id': artifact._id, **log_params}
+
+ self.primary_osf_resource.add_log(
+ action=nodelog_action,
+ params=nodelog_params,
+ request=api_request,
+ )
diff --git a/osf/models/schema_response.py b/osf/models/schema_response.py
index 4f7a4a283b8..0b584f436d3 100644
--- a/osf/models/schema_response.py
+++ b/osf/models/schema_response.py
@@ -54,7 +54,7 @@ class SchemaResponse(ObjectIDMixin, BaseModel):
blank=True
)
- revision_justification = models.CharField(max_length=2048, null=True, blank=True)
+ revision_justification = models.TextField(null=True, blank=True)
submitted_timestamp = NonNaiveDateTimeField(null=True, blank=True)
pending_approvers = models.ManyToManyField('osf.osfuser', related_name='pending_submissions')
diff --git a/osf/models/user.py b/osf/models/user.py
index 0a55c6c11b9..f169b4c28ed 100644
--- a/osf/models/user.py
+++ b/osf/models/user.py
@@ -662,6 +662,13 @@ def __unicode__(self):
def __str__(self):
return self.get_short_name()
+ def get_verified_external_id(self, external_service, verified_only=False):
+ identifier_info = self.external_identity.get(external_service, {})
+ for external_id, status in identifier_info.items():
+ if status and status == 'VERIFIED' or not verified_only:
+ return external_id
+ return None
+
@property
def contributed(self):
return self.nodes.all()
@@ -1875,11 +1882,11 @@ def has_resources(self):
If a user only has no resources or only deleted resources this will return false and they can safely be deactivated
otherwise they must delete or transfer their outstanding resources.
- :return bool: does the user have any active node, preprints, groups etc?
+ :return bool: does the user have any active node, preprints, groups, etc?
"""
from osf.models import Preprint
- nodes = self.nodes.exclude(is_deleted=True).exists()
+ nodes = self.nodes.filter(deleted__isnull=True).exists()
groups = self.osf_groups.exists()
preprints = Preprint.objects.filter(_contributors=self, ever_public=True, deleted__isnull=True).exists()
diff --git a/osf/utils/migrations.py b/osf/utils/migrations.py
index 93629fb0941..3ede133f7d0 100644
--- a/osf/utils/migrations.py
+++ b/osf/utils/migrations.py
@@ -414,6 +414,8 @@ def create_schema_blocks_for_atomic_schema(schema):
if block_type == 'question-label':
current_group_key = generate_object_id()
block['schema_block_group_key'] = current_group_key
+ elif block_type == 'paragraph': # if a paragraph trails a question-label
+ block['schema_block_group_key'] = current_group_key
elif block_type in grouped_block_types:
block['schema_block_group_key'] = current_group_key
else:
diff --git a/osf/utils/outcomes.py b/osf/utils/outcomes.py
new file mode 100644
index 00000000000..1dde6c9e169
--- /dev/null
+++ b/osf/utils/outcomes.py
@@ -0,0 +1,52 @@
+from enum import Enum, IntEnum
+
+from django.db.models import CharField, OuterRef, Subquery
+
+
+class ArtifactTypes(IntEnum):
+ '''Labels used to classify artifacts.
+
+ Gaps are to allow space for new values to be added later while
+ controlling for display order.
+
+ PRIMARY value is arbitrarily large as it is an internal-only concept for now
+ '''
+ UNDEFINED = 0
+ DATA = 1
+ ANALYTIC_CODE = 11
+ MATERIALS = 21
+ PAPERS = 31
+ SUPPLEMENTS = 41
+ PRIMARY = 1001
+
+ @classmethod
+ def choices(cls):
+ return tuple((entry.value, entry.name) for entry in cls)
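+
+ # For reference, a short sketch of what choices() yields (value/name pairs for Django fields):
+ #
+ #     ArtifactTypes.choices()[:2] == ((0, 'UNDEFINED'), (1, 'DATA'))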
+
+
+class OutcomeActions(Enum):
+ ADD = 0
+ UPDATE = 1
+ REMOVE = 2
+
+
+def make_primary_resource_guid_annotation(base_queryset):
+ from osf.models import Guid
+ primary_artifacts_and_guids = base_queryset.filter(
+ artifact_type=ArtifactTypes.PRIMARY
+ ).annotate(
+ resource_guid=Subquery(
+ Guid.objects.filter(
+ content_type=OuterRef('identifier__content_type'),
+ object_id=OuterRef('identifier__object_id')
+ ).order_by('-created').values('_id')[:1],
+ output_field=CharField(),
+ )
+ )
+
+ return Subquery(
+ primary_artifacts_and_guids.filter(
+ outcome_id=OuterRef('outcome_id')
+ ).values('resource_guid')[:1],
+ output_field=CharField()
+ )
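+
+# A minimal usage sketch, mirroring how ArtifactManager.get_queryset() consumes this helper;
+# `artifact_qs` is a placeholder for any OutcomeArtifact queryset:
+#
+#     annotated = artifact_qs.annotate(
+#         primary_resource_guid=make_primary_resource_guid_annotation(artifact_qs)
+#     )
+#     # each row now carries the GUID of its Outcome's PRIMARY resource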
diff --git a/osf_tests/factories.py b/osf_tests/factories.py
index 2d4746e469b..e5fdd2241a3 100644
--- a/osf_tests/factories.py
+++ b/osf_tests/factories.py
@@ -253,6 +253,8 @@ class InstitutionFactory(DjangoModelFactory):
domains = FakeList('url', n=3)
email_domains = FakeList('domain_name', n=1)
logo_name = factory.Faker('file_name')
+ orcid_record_verified_source = ''
+ delegation_protocol = ''
class Meta:
model = models.Institution
diff --git a/osf_tests/test_guid.py b/osf_tests/test_guid.py
index d80577da754..d3b5d9bd87a 100644
--- a/osf_tests/test_guid.py
+++ b/osf_tests/test_guid.py
@@ -161,6 +161,74 @@ def test_resolve_guid_no_url(self):
)
assert res.status_code == 404
+ def test_resolve_guid_no_auth_redirect_to_cas_includes_public(self):
+ """
+ Unauthenticated users are sent to login when visiting private projects, but not if the projects are public.
+ """
+ res = self.app.get(
+ self.node.web_url_for('resolve_guid', guid=self.node._id),
+ expect_errors=True,
+ )
+ assert res.status_code == 302
+ assert '/login?service=' in res.location
+
+ self.node.is_public = True
+ self.node.save()
+ res = self.app.get(
+ self.node.web_url_for('resolve_guid', guid=self.node._id),
+ expect_errors=True,
+ )
+ assert res.status_code == 200
+
+ def test_resolve_guid_no_auth_redirect_to_cas_includes_public_with_url_segments(self):
+ """
+ Unauthenticated users are sent to login when visiting private projects' related URLs, but not if the projects are
+ public.
+ """
+ for segment in ('comments', 'links', 'components', 'files', 'files/osfstorage', 'files/addon'):
+ self.node.is_public = False
+ self.node.save()
+ res = self.app.get(
+ f'{self.node.web_url_for("resolve_guid", guid=self.node._id)}/{segment}/',
+ expect_errors=True,
+ )
+ assert res.status_code == 302
+ assert '/login?service=' in res.location
+
+ self.node.is_public = True
+ self.node.save()
+ res = self.app.get(
+ f'{self.node.web_url_for("resolve_guid", guid=self.node._id)}/{segment}/',
+ expect_errors=True,
+ )
+ assert res.status_code == 200
+
+ def test_resolve_guid_private_request_access_or_redirect_to_cas(self):
+ """
+ Authenticated non-contributors see the Request Access page when access_requests_enabled is true on the
+ node; otherwise they get the legacy Forbidden page.
+ """
+ non_contrib = AuthUserFactory()
+ self.node.access_requests_enabled = False
+ self.node.save()
+ res = self.app.get(
+ self.node.web_url_for('resolve_guid', guid=self.node._id),
+ auth=non_contrib.auth,
+ expect_errors=True,
+ )
+ assert 'OSF | Forbidden' in res.body.decode()
+ assert res.status_code == 403
+
+ self.node.access_requests_enabled = True
+ self.node.save()
+ res = self.app.get(
+ self.node.web_url_for('resolve_guid', guid=self.node._id),
+ auth=non_contrib.auth,
+ expect_errors=True,
+ )
+ assert res.status_code == 403
+ assert 'OSF | Request Access' in res.body.decode()
+
def test_resolve_guid_download_file(self):
pp = PreprintFactory(finish=True)
diff --git a/osf_tests/test_outcomes.py b/osf_tests/test_outcomes.py
new file mode 100644
index 00000000000..7d0e0bcce99
--- /dev/null
+++ b/osf_tests/test_outcomes.py
@@ -0,0 +1,289 @@
+import pytest
+
+from osf.exceptions import CannotFinalizeArtifactError, NoPIDError
+from osf.models import Identifier, Outcome, OutcomeArtifact
+from osf.utils.outcomes import ArtifactTypes
+from osf_tests.factories import ProjectFactory, RegistrationFactory
+
+
+TEST_REGISTRATION_DOI = 'SOME_REGISTRATION_DOI'
+TEST_PROJECT_DOI = 'SOME_PROJECT_DOI'
+TEST_EXTERNAL_DOI = 'SOME_EXTERNAL_DOI'
+
+
+@pytest.fixture
+def registration():
+ return RegistrationFactory()
+
+@pytest.fixture
+def registration_doi(registration):
+ return Identifier.objects.create(
+ referent=registration,
+ value=TEST_REGISTRATION_DOI,
+ category='doi'
+ )
+
+@pytest.fixture
+def outcome(registration_doi):
+ outcome = Outcome.objects.create()
+ OutcomeArtifact.objects.create(
+ outcome=outcome,
+ identifier=registration_doi,
+ artifact_type=ArtifactTypes.PRIMARY
+ )
+ return outcome
+
+
+@pytest.mark.django_db
+class TestOutcomes:
+
+ def test_outcome_for_registration__get__exists(self, outcome, registration):
+ stored_outcome = Outcome.objects.for_registration(registration, create=False)
+ assert stored_outcome == outcome
+
+ def test_outcome_for_registration__get__none_exists(self, registration, registration_doi):
+ assert not Outcome.objects.for_registration(registration, create=False)
+
+ def test_outcome_for_registration__get__no_registration_identifier(self, registration):
+ with pytest.raises(NoPIDError):
+ Outcome.objects.for_registration(registration)
+
+ def test_outcome_for_registration__create(self, registration, registration_doi):
+ assert not Outcome.objects.exists()
+ Outcome.objects.for_registration(registration, create=True)
+ assert Outcome.objects.exists()
+
+ def test_outcome_for_registration__create__no_identifier(self, registration):
+ with pytest.raises(NoPIDError):
+ Outcome.objects.for_registration(registration, create=True)
+
+ def test_outcome_for_registration__create_creates_primary_artifact(
+ self, registration, registration_doi
+ ):
+ outcome = Outcome.objects.for_registration(registration, create=True)
+
+ assert outcome.artifacts.count() == 1
+ primary_artifact = outcome.artifacts.through.objects.get()
+ assert primary_artifact.identifier == registration_doi
+ assert primary_artifact.pid == registration_doi.value
+ assert primary_artifact.artifact_type == ArtifactTypes.PRIMARY
+ assert primary_artifact.primary_resource_guid == registration._id
+
+ def test_outcome_for_registration__create_copies_metadata(self, registration, registration_doi):
+ outcome = Outcome.objects.for_registration(registration, create=True)
+ assert outcome.title == registration.title
+ assert outcome.description == registration.description
+ assert outcome.category == registration.category
+
+ def test_primary_osf_resource(self, outcome, registration):
+ assert outcome.primary_osf_resource == registration
+
+
+@pytest.mark.django_db
+class TestOutcomeArtifact:
+
+ @pytest.fixture
+ def outcome(self, registration_doi):
+ outcome = Outcome.objects.create()
+ OutcomeArtifact.objects.create(
+ outcome=outcome,
+ identifier=registration_doi,
+ artifact_type=ArtifactTypes.PRIMARY
+ )
+ return outcome
+
+ @pytest.fixture
+ def project_doi(self):
+ project = ProjectFactory()
+ return Identifier.objects.create(
+ referent=project,
+ value=TEST_PROJECT_DOI,
+ category='doi'
+ )
+
+ @pytest.fixture
+ def external_doi(self):
+ return Identifier.objects.create(
+ value=TEST_EXTERNAL_DOI,
+ category='doi'
+ )
+
+ def test_get_artifacts_for_registration(self, outcome, registration, project_doi, external_doi):
+ assert not OutcomeArtifact.objects.for_registration(registration).exists()
+
+ project_artifact = outcome.artifact_metadata.create(
+ identifier=project_doi, artifact_type=ArtifactTypes.MATERIALS
+ )
+ external_artifact = outcome.artifact_metadata.create(
+ identifier=external_doi, artifact_type=ArtifactTypes.SUPPLEMENTS
+ )
+
+ # Add another Artifact for one of the identifiers to make sure it doesn't get picked up, too
+ bogus_outcome = Outcome.objects.create()
+ bogus_outcome.artifact_metadata.create(
+ identifier=external_doi, artifact_type=ArtifactTypes.ANALYTIC_CODE
+ )
+
+ registration_artifacts = OutcomeArtifact.objects.for_registration(registration)
+ # Registration artifact should not appear in the list
+ assert registration_artifacts.count() == 2
+
+ retrieved_project_artifact = registration_artifacts.get(identifier=project_doi)
+ assert retrieved_project_artifact == project_artifact
+ assert retrieved_project_artifact.pid == TEST_PROJECT_DOI
+ assert retrieved_project_artifact.primary_resource_guid == registration._id
+
+ retrieved_external_artifact = registration_artifacts.get(identifier=external_doi)
+ assert retrieved_external_artifact == external_artifact
+ assert retrieved_external_artifact.pid == TEST_EXTERNAL_DOI
+ assert retrieved_external_artifact.primary_resource_guid == registration._id
+
+ def test_update_identifier__get_existing_identifier(self, outcome, project_doi, external_doi):
+ test_artifact = outcome.artifact_metadata.create(artifact_type=ArtifactTypes.DATA)
+ test_artifact.update_identifier(new_pid_value=TEST_PROJECT_DOI)
+ assert test_artifact.identifier == project_doi
+
+ def test_update_identifier__create_new_identifier(self, outcome, project_doi):
+ assert not Identifier.objects.filter(value=TEST_EXTERNAL_DOI).exists()
+
+ test_artifact = outcome.artifact_metadata.create(artifact_type=ArtifactTypes.DATA)
+ test_artifact.update_identifier(new_pid_value=TEST_EXTERNAL_DOI)
+
+ assert test_artifact.identifier.value == TEST_EXTERNAL_DOI
+ assert Identifier.objects.filter(value=TEST_EXTERNAL_DOI).exists()
+
+ def test_update_identifier__deletes_previous_identifier_if_unreferenced(self, outcome, project_doi, external_doi):
+ assert Identifier.objects.filter(value=TEST_EXTERNAL_DOI).exists()
+ test_artifact = outcome.artifact_metadata.create(
+ identifier=external_doi, artifact_type=ArtifactTypes.DATA
+ )
+ assert test_artifact.identifier != project_doi
+
+ test_artifact.update_identifier(new_pid_value=project_doi.value)
+ assert test_artifact.identifier == project_doi
+ assert not Identifier.objects.filter(value=TEST_EXTERNAL_DOI).exists()
+
+ def test_update_identifier__keeps_previous_identifier_if_osf_referent_exists(self, outcome, project_doi):
+ assert Identifier.objects.filter(value=TEST_PROJECT_DOI).exists()
+ test_artifact = outcome.artifact_metadata.create(
+ identifier=project_doi, artifact_type=ArtifactTypes.DATA
+ )
+
+ test_artifact.update_identifier(new_pid_value=TEST_EXTERNAL_DOI)
+ assert test_artifact.identifier != project_doi
+ assert Identifier.objects.filter(value=TEST_PROJECT_DOI).exists()
+
+ def test_update_identifier__keeps_previous_identifier_if_part_of_other_outcomes(
+ self, outcome, project_doi, external_doi
+ ):
+ test_artifact = outcome.artifact_metadata.create(
+ identifier=external_doi, artifact_type=ArtifactTypes.DATA
+ )
+ alternate_outcome = Outcome.objects.create()
+ alternate_outcome.artifact_metadata.create(
+ identifier=external_doi, artifact_type=ArtifactTypes.ANALYTIC_CODE
+ )
+
+ test_artifact.update_identifier(new_pid_value=project_doi.value)
+ assert test_artifact.identifier == project_doi
+ assert Identifier.objects.filter(value=TEST_EXTERNAL_DOI).exists()
+
+ def test_update_identifier__no_change_if_same_pid(self, outcome, project_doi):
+ test_artifact = outcome.artifact_metadata.create(
+ identifier=project_doi, artifact_type=ArtifactTypes.DATA
+ )
+
+ test_artifact.update_identifier(new_pid_value=project_doi.value)
+ assert test_artifact.identifier == project_doi
+
+ @pytest.mark.parametrize('empty_value', ['', None])
+ def test_update_identifier__raises_if_empty_pid(self, outcome, project_doi, empty_value):
+ test_artifact = outcome.artifact_metadata.create(
+ identifier=project_doi, artifact_type=ArtifactTypes.DATA
+ )
+
+ with pytest.raises(NoPIDError):
+ test_artifact.update_identifier(new_pid_value=empty_value)
+
+ def test_finalize__raises__missing_identifier(self, outcome):
+ test_artifact = outcome.artifact_metadata.create(artifact_type=ArtifactTypes.DATA)
+
+ with pytest.raises(CannotFinalizeArtifactError) as caught:
+ test_artifact.finalize()
+ assert caught.value.incomplete_fields == ['identifier__value']
+
+ def test_finalize__raises__missing_identifier_value(self, outcome, project_doi):
+ test_artifact = outcome.artifact_metadata.create(
+ identifier=project_doi, artifact_type=ArtifactTypes.DATA
+ )
+ project_doi.value = ''
+ project_doi.save()
+
+ with pytest.raises(CannotFinalizeArtifactError) as caught:
+ test_artifact.finalize()
+ assert caught.value.incomplete_fields == ['identifier__value']
+
+ def test_finalize__raises__missing_artifact_type(self, outcome, project_doi):
+ test_artifact = outcome.artifact_metadata.create(identifier=project_doi)
+
+ with pytest.raises(CannotFinalizeArtifactError) as caught:
+ test_artifact.finalize()
+ assert caught.value.incomplete_fields == ['artifact_type']
+
+ def test_finalize__raises__missing_both(self, outcome):
+ test_artifact = outcome.artifact_metadata.create()
+
+ with pytest.raises(CannotFinalizeArtifactError) as caught:
+ test_artifact.finalize()
+ assert caught.value.incomplete_fields == ['identifier__value', 'artifact_type']
+
+ def test_delete_artifact__deletes_from_db_if_not_finalized(self, outcome, project_doi):
+ test_artifact = outcome.artifact_metadata.create(
+ identifier=project_doi, artifact_type=ArtifactTypes.DATA, finalized=False
+ )
+
+ test_artifact.delete()
+ assert not OutcomeArtifact.objects.filter(id=test_artifact.id).exists()
+
+ def test_delete_artifact__sets_deleted_if_finalized(self, outcome, project_doi):
+ test_artifact = outcome.artifact_metadata.create(
+ identifier=project_doi, artifact_type=ArtifactTypes.DATA, finalized=True
+ )
+ assert not test_artifact.deleted
+
+ test_artifact.delete()
+ assert test_artifact.deleted
+ assert OutcomeArtifact.objects.filter(id=test_artifact.id).exists()
+
+ @pytest.mark.parametrize('is_finalized', [True, False])
+ def test_delete_artifact__deletes_identifier_if_unreferenced(self, outcome, external_doi, is_finalized):
+ assert Identifier.objects.filter(value=TEST_EXTERNAL_DOI).exists()
+ test_artifact = outcome.artifact_metadata.create(
+ identifier=external_doi, artifact_type=ArtifactTypes.DATA, finalized=is_finalized
+ )
+
+ test_artifact.delete()
+ assert not Identifier.objects.filter(value=TEST_EXTERNAL_DOI).exists()
+
+ @pytest.mark.parametrize('is_finalized', [True, False])
+ def test_delete_artifact__keeps_identifier_if_osf_referent_exists(self, outcome, project_doi, is_finalized):
+ test_artifact = outcome.artifact_metadata.create(
+ identifier=project_doi, artifact_type=ArtifactTypes.DATA, finalized=is_finalized
+ )
+
+ test_artifact.delete()
+ assert Identifier.objects.filter(value=TEST_PROJECT_DOI).exists()
+
+ @pytest.mark.parametrize('is_finalized', [True, False])
+ def test_delete_artifact__keeps_identifier_if_part_of_other_outcomes(self, outcome, external_doi, is_finalized):
+ test_artifact = outcome.artifact_metadata.create(
+ identifier=external_doi, artifact_type=ArtifactTypes.DATA, finalized=is_finalized
+ )
+
+ alternate_outcome = Outcome.objects.create()
+ alternate_outcome.artifact_metadata.create(
+ identifier=external_doi, artifact_type=ArtifactTypes.ANALYTIC_CODE
+ )
+
+ test_artifact.delete()
+ assert Identifier.objects.filter(value=TEST_EXTERNAL_DOI).exists()
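To make the identifier bookkeeping these tests pin down easier to follow, here is a simplified sketch of the update_identifier behaviour that is consistent with the assertions above. It is an illustration only; the real OutcomeArtifact method is not part of this diff and its signature may differ:

    from osf.exceptions import NoPIDError
    from osf.models import Identifier, OutcomeArtifact

    def update_identifier_sketch(artifact, new_pid_value):
        # Empty PIDs are rejected outright
        if not new_pid_value:
            raise NoPIDError('Cannot assign an empty PID value')
        previous = artifact.identifier
        # Reuse an existing Identifier with this value, or create a new one
        artifact.identifier, _ = Identifier.objects.get_or_create(
            value=new_pid_value, defaults={'category': 'doi'}
        )
        artifact.save()
        # Clean up the old Identifier only if nothing else points at it:
        # no OSF referent and no other OutcomeArtifact rows.
        if previous and previous != artifact.identifier:
            still_referenced = (
                previous.referent is not None
                or OutcomeArtifact.objects.filter(identifier=previous).exists()
            )
            if not still_referenced:
                previous.delete()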
diff --git a/package.json b/package.json
index 16548b3e37b..8e41d97c64b 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "OSF",
- "version": "22.04.0",
+ "version": "22.06.0",
"description": "Facilitating Open Science",
"repository": "https://github.com/CenterForOpenScience/osf.io",
"author": "Center for Open Science",
diff --git a/requirements.txt b/requirements.txt
index e331d5be272..f814c65a0e3 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -63,7 +63,7 @@ pyjwe==1.0.0
# Required by pyjwe and ndg-httpsclient
# Building wheel for cryptography >= 3.4.0 requires a Rust version incompatible with Docker base image.
cryptography==3.3.2
-jsonschema==2.6.0
+jsonschema==3.1.1
django-guardian==1.4.9
# Admin requirements
@@ -72,7 +72,7 @@ django-password-reset==1.0
sendgrid-django==2.0.0
# Analytics requirements
-keen==0.5.1
+keen==0.7.0
maxminddb-geolite2==2018.308
# OSF models
@@ -89,7 +89,8 @@ django-bulk-update==2.2.0
transitions==0.8.2
# identifiers
-datacite==1.0.1
+datacite==1.1.2
+
# Metrics
django-elasticsearch-metrics==5.0.0
diff --git a/scripts/populate_institutions.py b/scripts/populate_institutions.py
index 8eac30bd88d..647394084d5 100644
--- a/scripts/populate_institutions.py
+++ b/scripts/populate_institutions.py
@@ -208,7 +208,7 @@ def main(default_args=False):
{
'_id': 'cfa',
'name': 'Center for Astrophysics | Harvard & Smithsonian',
- 'description': 'Open Source Project Management Tools for the CfA Community: About OSF | Research at the CfA | CfA Library | Get Help',
+ 'description': 'Open Source Project Management Tools for the CfA Community: About OSF | Research at the CfA | CfA Library | Get Help',
'banner_name': 'cfa-banner.png',
'logo_name': 'cfa-shield.png',
'login_url': None,
@@ -353,6 +353,23 @@ def main(default_args=False):
'email_domains': ['esipfed.org'],
'delegation_protocol': '',
},
+ {
+ '_id': 'eur',
+ 'name': 'Erasmus University Rotterdam',
+ 'description': 'EUR Data Policy | '
+ 'CyberSecurity at EUR | '
+ 'EUR Data Classification | '
+ 'EUR Data Classification (Examples) | '
+ 'EUR OSF Research Guidelines | '
+ 'Contact',
+ 'banner_name': 'eur-banner.png',
+ 'logo_name': 'eur-shield.png',
+ 'login_url': SHIBBOLETH_SP_LOGIN.format(encode_uri_component('https://sts.windows.net/715902d6-f63e-4b8d-929b-4bb170bad492/')),
+ 'logout_url': SHIBBOLETH_SP_LOGOUT.format(encode_uri_component('https://osf.io/goodbye')),
+ 'domains': [],
+ 'email_domains': [],
+ 'delegation_protocol': 'saml-shib',
+ },
{
'_id': 'ferris',
'name': 'Ferris State University',
@@ -572,6 +589,18 @@ def main(default_args=False):
'email_domains': [],
'delegation_protocol': 'saml-shib',
},
+ {
+ '_id': 'nationalmaglab',
+ 'name': 'National High Magnetic Field Laboratory',
+ 'description': 'This platform is provided to enable collaboration, sharing, and dissemination of research products from the National High Magnetic Field Laboratory according to the principles of FAIR and open science. All public projects must adhere to National MagLab policies & procedures related to confidentiality and proper data management.',
+ 'banner_name': 'nationalmaglab-banner.png',
+ 'logo_name': 'nationalmaglab-shield.png',
+ 'login_url': SHIBBOLETH_SP_LOGIN.format(encode_uri_component('https://idp.fsu.edu')),
+ 'logout_url': SHIBBOLETH_SP_LOGOUT.format(encode_uri_component('https://osf.io/goodbye')),
+ 'domains': [],
+ 'email_domains': [],
+ 'delegation_protocol': 'saml-shib',
+ },
{
'_id': 'nesta',
'name': 'Nesta',
@@ -632,6 +661,19 @@ def main(default_args=False):
'email_domains': [],
'delegation_protocol': 'saml-shib',
},
+ {
+ '_id': 'oxford',
+ 'name': 'University of Oxford',
+ 'description': '',
+ 'banner_name': 'oxford-banner.png',
+ 'logo_name': 'oxford-shield.png',
+ 'login_url': None,
+ 'logout_url': None,
+ 'domains': [],
+ 'email_domains': [],
+ 'delegation_protocol': 'via-orcid',
+ 'orcid_record_verified_source': 'ORCID Integration at the University of Oxford',
+ },
{
'_id': 'pu',
'name': 'Princeton University',
@@ -644,6 +686,18 @@ def main(default_args=False):
'email_domains': [],
'delegation_protocol': 'saml-shib',
},
+ {
+ '_id': 'purdue',
+ 'name': 'Purdue University',
+ 'description': 'This open scholarship platform is provided by Purdue University Libraries in partnership with the University\'s Graduate School, Regulatory Affairs, and Research Integrity Office. All projects must adhere to Purdue\'s Information security, Human subjects research policies, and related data classification and handling procedures. Associated guidance on regulations is available via the Responsible Conduct of Research website and the Research Integrity Office. For questions and support please reach out to Purdue\'s OSF contact.',
+ 'banner_name': 'purdue-banner.png',
+ 'logo_name': 'purdue-shield.png',
+ 'login_url': SHIBBOLETH_SP_LOGIN.format(encode_uri_component('https://idp.purdue.edu/idp/shibboleth')),
+ 'logout_url': SHIBBOLETH_SP_LOGOUT.format(encode_uri_component('https://osf.io/goodbye')),
+ 'domains': [],
+ 'email_domains': [],
+ 'delegation_protocol': 'saml-shib',
+ },
{
'_id': 'sc',
'name': 'University of South Carolina Libraries',
@@ -651,7 +705,7 @@ def main(default_args=False):
'banner_name': 'sc-banner.png',
'logo_name': 'sc-shield.png',
'login_url': SHIBBOLETH_SP_LOGIN.format(
- encode_uri_component('urn:mace:incommon:sc.edu')),
+ encode_uri_component('https://cas.auth.sc.edu/cas/idp')),
'logout_url': SHIBBOLETH_SP_LOGOUT.format(
encode_uri_component('https://osf.io/goodbye')),
'domains': ['osf.sc.edu'],
@@ -1067,6 +1121,33 @@ def main(default_args=False):
'email_domains': ['yahoo.com'],
'delegation_protocol': '',
},
+ {
+ '_id': 'oxford',
+ 'name': 'University of Oxford [Stage]',
+ 'description': 'Here is the place to put in links to other resources, security and data policies, research guidelines, and/or a contact for user support within your institution.',
+ 'banner_name': 'placeholder-banner.png',
+ 'logo_name': 'placeholder-shield.png',
+ 'login_url': None,
+ 'logout_url': None,
+ 'domains': [],
+ 'email_domains': [],
+ 'delegation_protocol': 'via-orcid',
+ 'orcid_record_verified_source': 'ORCID Integration at the University of Oxford',
+ },
+ {
+ '_id': 'osftype1',
+ 'name': 'Fake "via-ORCiD" Institution [Stage]',
+ 'description': 'Fake OSF Institution Type 1. This institution uses ORCiD SSO for login and its user '
+ 'affiliation is retrieved from ORCiD public record.',
+ 'banner_name': 'placeholder-banner.png',
+ 'logo_name': 'placeholder-shield.png',
+ 'login_url': None,
+ 'logout_url': None,
+ 'domains': [],
+ 'email_domains': [],
+ 'delegation_protocol': 'via-orcid',
+ 'orcid_record_verified_source': 'OSF Integration',
+ },
],
'stage2': [
{
@@ -1097,6 +1178,18 @@ def main(default_args=False):
},
],
'test': [
+ {
+ '_id': 'osfidemo',
+ 'name': 'OSF Demo Institution',
+ 'description': 'Here is the place to put in links to other resources, security and data policies, research guidelines, and/or a contact for user support within your institution.',
+ 'banner_name': 'placeholder-banner.png',
+ 'logo_name': 'placeholder-shield.png',
+ 'login_url': None,
+ 'logout_url': None,
+ 'domains': [],
+ 'email_domains': [],
+ 'delegation_protocol': '',
+ },
{
'_id': 'a2jlab',
'name': 'Access to Justice Lab [Test]',
@@ -1224,7 +1317,7 @@ def main(default_args=False):
{
'_id': 'cfa',
'name': 'Center for Astrophysics | Harvard & Smithsonian [Test]',
- 'description': 'Open Source Project Management Tools for the CfA Community: About OSF | Research at the CfA | CfA Library | Get Help',
+ 'description': 'Open Source Project Management Tools for the CfA Community: About OSF | Research at the CfA | CfA Library | Get Help',
'banner_name': 'cfa-banner.png',
'logo_name': 'cfa-shield.png',
'login_url': None,
@@ -1369,6 +1462,23 @@ def main(default_args=False):
'email_domains': ['esipfed.org'],
'delegation_protocol': '',
},
+ {
+ '_id': 'eur',
+ 'name': 'Erasmus University Rotterdam [Test]',
+ 'description': 'EUR Data Policy | '
+ 'CyberSecurity at EUR | '
+ 'EUR Data Classification | '
+ 'EUR Data Classification (Examples) | '
+ 'EUR OSF Research Guidelines | '
+ 'Contact',
+ 'banner_name': 'eur-banner.png',
+ 'logo_name': 'eur-shield.png',
+ 'login_url': SHIBBOLETH_SP_LOGIN.format(encode_uri_component('https://sts.windows.net/715902d6-f63e-4b8d-929b-4bb170bad492/')),
+ 'logout_url': SHIBBOLETH_SP_LOGOUT.format(encode_uri_component('https://test.osf.io/goodbye')),
+ 'domains': [],
+ 'email_domains': [],
+ 'delegation_protocol': 'saml-shib',
+ },
{
'_id': 'ferris',
'name': 'Ferris State University [Test]',
@@ -1588,6 +1698,18 @@ def main(default_args=False):
'email_domains': [],
'delegation_protocol': 'saml-shib',
},
+ {
+ '_id': 'nationalmaglab',
+ 'name': 'National High Magnetic Field Laboratory [Test]',
+ 'description': 'This platform is provided to enable collaboration, sharing, and dissemination of research products from the National High Magnetic Field Laboratory according to the principles of FAIR and open science. All public projects must adhere to National MagLab policies & procedures related to confidentiality and proper data management.',
+ 'banner_name': 'nationalmaglab-banner.png',
+ 'logo_name': 'nationalmaglab-shield.png',
+ 'login_url': SHIBBOLETH_SP_LOGIN.format(encode_uri_component('https://idp.fsu.edu')),
+ 'logout_url': SHIBBOLETH_SP_LOGOUT.format(encode_uri_component('https://test.osf.io/goodbye')),
+ 'domains': ['test-osf-nationalmaglab.cos.io'],
+ 'email_domains': [],
+ 'delegation_protocol': 'saml-shib',
+ },
{
'_id': 'nesta',
'name': 'Nesta [Test]',
@@ -1648,6 +1770,19 @@ def main(default_args=False):
'email_domains': [],
'delegation_protocol': 'saml-shib',
},
+ {
+ '_id': 'oxford',
+ 'name': 'University of Oxford [Test]',
+ 'description': '',
+ 'banner_name': 'oxford-banner.png',
+ 'logo_name': 'oxford-shield.png',
+ 'login_url': None,
+ 'logout_url': None,
+ 'domains': [],
+ 'email_domains': [],
+ 'delegation_protocol': 'via-orcid',
+ 'orcid_record_verified_source': 'ORCID Integration at the University of Oxford',
+ },
{
'_id': 'pu',
'name': 'Princeton University [Test]',
@@ -1660,6 +1795,18 @@ def main(default_args=False):
'email_domains': [],
'delegation_protocol': 'saml-shib',
},
+ {
+ '_id': 'purdue',
+ 'name': 'Purdue University [Test]',
+ 'description': 'This open scholarship platform is provided by Purdue University Libraries in partnership with the University\'s Graduate School, Regulatory Affairs, and Research Integrity Office. All projects must adhere to Purdue\'s Information security, Human subjects research policies, and related data classification and handling procedures. Associated guidance on regulations is available via the Responsible Conduct of Research website and the Research Integrity Office. For questions and support please reach out to Purdue\'s OSF contact.',
+ 'banner_name': 'purdue-banner.png',
+ 'logo_name': 'purdue-shield.png',
+ 'login_url': SHIBBOLETH_SP_LOGIN.format(encode_uri_component('https://idp.purdue.edu/idp/shibboleth')),
+ 'logout_url': SHIBBOLETH_SP_LOGOUT.format(encode_uri_component('https://test.osf.io/goodbye')),
+ 'domains': [],
+ 'email_domains': [],
+ 'delegation_protocol': 'saml-shib',
+ },
{
'_id': 'sc',
'name': 'University of South Carolina Libraries [Test]',
@@ -1667,7 +1814,7 @@ def main(default_args=False):
'banner_name': 'sc-banner.png',
'logo_name': 'sc-shield.png',
'login_url': SHIBBOLETH_SP_LOGIN.format(
- encode_uri_component('urn:mace:incommon:sc.edu')),
+ encode_uri_component('https://cas.auth.sc.edu/cas/idp')),
'logout_url': SHIBBOLETH_SP_LOGOUT.format(
encode_uri_component('https://test.osf.io/goodbye')),
'domains': ['test-osf-sc.cos.io'],
@@ -2051,76 +2198,144 @@ def main(default_args=False):
],
'local': [
{
- '_id': 'fake-saml-type-0',
- 'name': 'Fake SAML-auth Institution Type-0',
- 'description': 'A fake SAML-auth institution with no special features',
+ '_id': 'osftype0',
+ 'name': 'Fake CAS Institution',
+ 'description': 'Fake OSF Institution Type 0. Its SSO is done via CAS (pac4j impl) where OSF-CAS serves as '
+ 'the CAS client and the institution as the CAS server.',
+ 'banner_name': 'placeholder-banner.png',
+ 'logo_name': 'placeholder-shield.png',
+ 'login_url': None,
+ 'logout_url': None,
+ 'domains': [],
+ 'email_domains': [],
+ 'delegation_protocol': 'cas-pac4j',
+ },
+ {
+ '_id': 'osftype1',
+ 'name': 'Fake "via-ORCiD" Institution',
+ 'description': 'Fake OSF Institution Type 1. This institution uses ORCiD SSO for login and its user '
+ 'affiliation is retrieved from ORCiD public record.',
+ 'banner_name': 'placeholder-banner.png',
+ 'logo_name': 'placeholder-shield.png',
+ 'login_url': None,
+ 'logout_url': None,
+ 'domains': [],
+ 'email_domains': [],
+ 'delegation_protocol': 'via-orcid',
+ 'orcid_record_verified_source': 'OSF Integration',
+ },
+ {
+ '_id': 'osftype2',
+ 'name': 'Fake SAML Institution - Standard',
+ 'description': 'Fake OSF Institution Type 2. Its SSO is done via SAML (Shibboleth impl) where OSF-CAS '
+ 'serves as the SP and the institution as the IdP.',
'banner_name': 'placeholder-banner.png',
'logo_name': 'placeholder-shield.png',
- 'login_url': SHIBBOLETH_SP_LOGIN.format(encode_uri_component('fake-saml-idp-type-0-default')),
+ 'login_url': SHIBBOLETH_SP_LOGIN.format(encode_uri_component('type-2-fake-saml-idp')),
'logout_url': SHIBBOLETH_SP_LOGOUT.format(encode_uri_component('http://localhost:5000/goodbye')),
'domains': [],
'email_domains': [],
'delegation_protocol': 'saml-shib',
+ 'orcid_record_verified_source': '',
},
{
- '_id': 'fake-saml-type-1',
- 'name': 'Fake SAML-auth Institution Type-1',
- 'description': 'A fake SAML-auth institution that has shared SSO enabled',
+ '_id': 'osftype3',
+ 'name': 'Fake SAML Institution - Shared SSO Primary (Criteria: EQUALS_TO)',
+ 'description': 'Fake OSF Institution Type 3. Its SSO is done via SAML (Shibboleth impl) where OSF-CAS '
+ 'serves as the SP and the institution as the IdP. This institution is a primary one that '
+ 'provides shared SSO to secondary institutions.',
'banner_name': 'placeholder-banner.png',
'logo_name': 'placeholder-shield.png',
- 'login_url': SHIBBOLETH_SP_LOGIN.format(encode_uri_component('fake-saml-idp-type-1-shared-sso')),
+ 'login_url': SHIBBOLETH_SP_LOGIN.format(encode_uri_component('type-3-fake-saml-idp')),
'logout_url': SHIBBOLETH_SP_LOGOUT.format(encode_uri_component('http://localhost:5000/goodbye')),
'domains': [],
'email_domains': [],
'delegation_protocol': 'saml-shib',
},
{
- '_id': 'fake-saml-type-2',
- 'name': 'Fake SAML-auth Institution Type-2',
- 'description': 'A fake SAML-auth institution that has selective SSO enabled',
+ '_id': 'osftype4',
+ 'name': 'Fake SAML Institution - Shared SSO Secondary (Criteria: EQUALS_TO)',
+ 'description': 'Fake OSF Institution Type 4. Its SSO is done via SAML (Shibboleth impl) where OSF-CAS '
+ 'serves as the SP and the institution as the IdP. This institution is a secondary one that '
+ 'uses a primary institution\'s SSO.',
'banner_name': 'placeholder-banner.png',
'logo_name': 'placeholder-shield.png',
- 'login_url': SHIBBOLETH_SP_LOGIN.format(encode_uri_component('fake-saml-idp-type-2-selective-sso')),
+ 'login_url': SHIBBOLETH_SP_LOGIN.format(encode_uri_component('type-4-fake-saml-idp')),
'logout_url': SHIBBOLETH_SP_LOGOUT.format(encode_uri_component('http://localhost:5000/goodbye')),
'domains': [],
'email_domains': [],
'delegation_protocol': 'saml-shib',
},
{
- '_id': 'fake-saml-type-3',
- 'name': 'Fake SAML-auth Institution Type-3',
- 'description': 'A fake SAML-auth institution that uses eduPersonPrimaryOrgUnitDN for department',
+ '_id': 'osftype5',
+ 'name': 'Fake SAML Institution - Selective SSO',
+ 'description': 'Fake OSF Institution Type 5. Its SSO is done via SAML (Shibboleth impl) where OSF-CAS '
+ 'serves as the SP and the institution as the IdP. This institution only allows a subset of '
+ 'users to use SSO by releasing a special attribute for them.',
'banner_name': 'placeholder-banner.png',
'logo_name': 'placeholder-shield.png',
- 'login_url': SHIBBOLETH_SP_LOGIN.format(encode_uri_component('fake-saml-idp-type-3-department-eduperson')),
+ 'login_url': SHIBBOLETH_SP_LOGIN.format(encode_uri_component('type-5-fake-saml-idp')),
'logout_url': SHIBBOLETH_SP_LOGOUT.format(encode_uri_component('http://localhost:5000/goodbye')),
'domains': [],
'email_domains': [],
'delegation_protocol': 'saml-shib',
+ 'orcid_record_verified_source': '',
},
{
- '_id': 'fake-saml-type-4',
- 'name': 'Fake SAML-auth Institution Type-4',
- 'description': 'A fake SAML-auth institution that uses a non-eduPerson attribute for department',
+ '_id': 'osftype6',
+ 'name': 'Fake SAML Institution - Department I',
+ 'description': 'Fake OSF Institution Type 6. Its SSO is done via SAML (Shibboleth impl) where OSF-CAS '
+ 'serves as the SP and the institution as the IdP. This institution provides the department '
+ 'attribute via an eduPerson attribute.',
'banner_name': 'placeholder-banner.png',
'logo_name': 'placeholder-shield.png',
- 'login_url': SHIBBOLETH_SP_LOGIN.format(encode_uri_component('fake-saml-idp-type-4-department-customized')),
+ 'login_url': SHIBBOLETH_SP_LOGIN.format(encode_uri_component('type-6-fake-saml-idp')),
'logout_url': SHIBBOLETH_SP_LOGOUT.format(encode_uri_component('http://localhost:5000/goodbye')),
'domains': [],
'email_domains': [],
'delegation_protocol': 'saml-shib',
},
{
- '_id': 'fake-cas-type-0',
- 'name': 'Fake CAS-auth Institution Type-0',
- 'description': 'A fake CAS-auth institution with no special features',
+ '_id': 'osftype7',
+ 'name': 'Fake SAML Institution - Department II',
+ 'description': 'Fake OSF Institution Type 7. Its SSO is done via SAML (Shibboleth impl) where OSF-CAS '
+ 'serves as the SP and the institution as the IdP. This institution provides the department '
+ 'attribute via a customized attribute.',
'banner_name': 'placeholder-banner.png',
'logo_name': 'placeholder-shield.png',
- 'login_url': None,
- 'logout_url': None,
+ 'login_url': SHIBBOLETH_SP_LOGIN.format(encode_uri_component('type-7-fake-saml-idp')),
+ 'logout_url': SHIBBOLETH_SP_LOGOUT.format(encode_uri_component('http://localhost:5000/goodbye')),
'domains': [],
'email_domains': [],
- 'delegation_protocol': 'cas-pac4j',
+ 'delegation_protocol': 'saml-shib',
+ },
+ {
+ '_id': 'osftype8',
+ 'name': 'Fake SAML Institution - Shared SSO Primary (Criteria: CONTAINS)',
+ 'description': 'Fake OSF Institution Type 8. Its SSO is done via SAML (Shibboleth impl) where OSF-CAS '
+ 'serves as the SP and the institution as the IdP. This institution is a primary one that '
+ 'provides shared SSO to secondary institutions.',
+ 'banner_name': 'placeholder-banner.png',
+ 'logo_name': 'placeholder-shield.png',
+ 'login_url': SHIBBOLETH_SP_LOGIN.format(encode_uri_component('type-8-fake-saml-idp')),
+ 'logout_url': SHIBBOLETH_SP_LOGOUT.format(encode_uri_component('http://localhost:5000/goodbye')),
+ 'domains': [],
+ 'email_domains': [],
+ 'delegation_protocol': 'saml-shib',
+ },
+ {
+ '_id': 'osftype9',
+ 'name': 'Fake SAML Institution - Shared SSO Secondary (Criteria: CONTAINS)',
+ 'description': 'Fake OSF Institution Type 9. Its SSO is done via SAML (Shibboleth impl) where OSF-CAS '
+ 'serves as the SP and the institution as the IdP. This institution is a secondary one that '
+ 'uses a primary institution\'s SSO.',
+ 'banner_name': 'placeholder-banner.png',
+ 'logo_name': 'placeholder-shield.png',
+ 'login_url': SHIBBOLETH_SP_LOGIN.format(encode_uri_component('type-9-fake-saml-idp')),
+ 'logout_url': SHIBBOLETH_SP_LOGOUT.format(encode_uri_component('http://localhost:5000/goodbye')),
+ 'domains': [],
+ 'email_domains': [],
+ 'delegation_protocol': 'saml-shib',
},
],
}
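The fixture descriptions above distinguish two shared-SSO pairings, EQUALS_TO (types 3 and 4) and CONTAINS (types 8 and 9), plus a selective-SSO variant gated on a released attribute. A purely hypothetical sketch of how the two matching criteria differ (all names invented for illustration; this is not OSF or OSF-CAS code):

    def secondary_institution_sso_eligible(criteria, released_value, expected_value):
        # EQUALS_TO: the attribute released by the primary IdP must match exactly.
        if criteria == 'EQUALS_TO':
            return released_value == expected_value
        # CONTAINS: the released attribute only needs to contain the expected token.
        if criteria == 'CONTAINS':
            return expected_value in released_value
        return False

    # e.g. secondary_institution_sso_eligible('CONTAINS', 'staff;member;osftype9', 'osftype9') -> True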
diff --git a/scripts/remove_after_use/verify_groups_guardian_migration.py b/scripts/remove_after_use/verify_groups_guardian_migration.py
deleted file mode 100644
index 4a4573e7b01..00000000000
--- a/scripts/remove_after_use/verify_groups_guardian_migration.py
+++ /dev/null
@@ -1,126 +0,0 @@
-"""Script to verify permissions have transferred post groups/guardian.
-
-"docker-compose run --rm web python3 -m scripts.remove_after_use.verify_groups_guardian_migration"
-"""
-import logging
-from random import randint
-
-from website.app import setup_django
-setup_django()
-
-from django.apps import apps
-from django.contrib.auth.models import Permission, Group
-
-from osf.utils.permissions import PERMISSIONS, reduce_permissions
-from osf.models import AbstractNode, Contributor, Preprint, Node, Registration
-from osf.models.node import NodeGroupObjectPermission
-from osf.models.preprint import PreprintGroupObjectPermission
-from osf.utils.permissions import READ, WRITE, ADMIN
-
-logger = logging.getLogger(__name__)
-logging.basicConfig(level=logging.INFO)
-
-
-def check_expected(expected, actual, error_msg):
- if expected != actual:
- logger.info('{}. Expected {} rows migrated; received {}.'.format(error_msg, expected, actual))
- else:
- logger.info('{} rows added.'.format(actual))
-
-def verify_permissions_created():
- """
- Expecting three permissions added, read, write, admin perms
- """
- expected = len(PERMISSIONS)
- actual = Permission.objects.filter(codename__in=PERMISSIONS).count()
-
- check_expected(expected, actual, 'Discepancy in Permission table.')
-
-def verify_auth_groups():
- """
- Expecting three groups added for every AbstractNode - read/write/admin
- """
- expected = AbstractNode.objects.count() * 3
- actual = Group.objects.filter(name__icontains='node_').count()
-
- check_expected(expected, actual, 'Discepancy in auth_group table.')
-
-def verify_expected_node_group_object_permission_counts():
- """
- For every AbstactNode, three Django groups - admin, write, read are created.
- Admin group gets admin/write/read perms, write - write/read, and read: read.
- So for every node, 6 line items added to NodeGroupObjectPermission. Linking
- these groups with their permissions to the given node.
- """
- expected_nodegroupobjperm_count = AbstractNode.objects.count() * 6
- actual_nodegroupobjperm_count = NodeGroupObjectPermission.objects.count()
-
- check_expected(expected_nodegroupobjperm_count, actual_nodegroupobjperm_count, 'Discrepancy in NodeGroupObjectPermission table.')
-
-def verify_expected_contributor_migration():
- """
- Based on contributor admin/write/read columns, users are migrated to the osfgroupuser table and added to the appropriate Django group.
- """
- OSFUserGroup = apps.get_model('osf', 'osfuser_groups')
- expected = Contributor.objects.count()
- actual = OSFUserGroup.objects.filter(group__name__icontains='node_').count()
- check_expected(expected, actual, 'Discrepancy in contributor migration to OSFUserGroup table.')
-
-def verify_preprint_foreign_key_migration():
- expected_preprintgroupobjperm_count = Preprint.objects.count() * 6
- actual_preprintgroupobjperm_count = PreprintGroupObjectPermission.objects.count()
-
- check_expected(expected_preprintgroupobjperm_count, actual_preprintgroupobjperm_count, 'Discrepancy in PreprintGroupObjectPermission table.')
-
-def verify_random_objects():
- resources = [Node, Registration]
- for resource in resources:
- for i in range(1,10):
- random_resource = _get_random_object(resource)
- if random_resource:
- _verify_contributor_perms(random_resource)
-
-def _verify_contributor_perms(resource):
- for user in resource.contributors:
- contrib = Contributor.objects.get(node=resource, user=user)
-
- if contrib.admin:
- if contrib.permission != ADMIN:
- _suspected_contributor_migration_error(contrib)
- elif contrib.write:
- if contrib.permission != WRITE:
- _suspected_contributor_migration_error(contrib)
- elif contrib.read:
- if contrib.permission != READ:
- _suspected_contributor_migration_error(contrib)
-
-
-def _suspected_contributor_migration_error(contrib):
- logger.info('Suspected contributor migration error on {}.'.format(contrib._id))
-
-
-def _get_random_object(model):
- model_count = model.objects.count()
- if model_count:
- return model.objects.all()[randint(1, model_count - 1)]
- return None
-
-
-def main():
- logger.info('Verifying permissions created...')
- verify_permissions_created()
- logger.info('Verifying auth groups created...')
- verify_auth_groups()
- logger.info('Verifying node groups given permissions to their nodes...')
- verify_expected_node_group_object_permission_counts()
- logger.info('Verifying contributors added to node django groups...')
- verify_expected_contributor_migration()
- logger.info('Verifying preprint perms migrated to direct foreign key table...')
- verify_preprint_foreign_key_migration()
- logger.info('Verifying a selection of random contributor permissions...')
- verify_random_objects()
- logger.info('Done!')
-
-
-if __name__ == '__main__':
- main()
diff --git a/tests/identifiers/test_crossref.py b/tests/identifiers/test_crossref.py
index e1067aeb547..4079efbec85 100644
--- a/tests/identifiers/test_crossref.py
+++ b/tests/identifiers/test_crossref.py
@@ -16,7 +16,8 @@
ProjectFactory,
PreprintFactory,
PreprintProviderFactory,
- AuthUserFactory
+ AuthUserFactory,
+ InstitutionFactory
)
from framework.flask import rm_handlers
from framework.auth.core import Auth
@@ -94,7 +95,7 @@ def test_crossref_build_doi(self, crossref_client, preprint):
def test_crossref_build_metadata(self, crossref_client, preprint):
test_email = 'test-email@osf.io'
with mock.patch('website.settings.CROSSREF_DEPOSITOR_EMAIL', test_email):
- crossref_xml = crossref_client.build_metadata(preprint, pretty_print=True)
+ crossref_xml = crossref_client.build_metadata(preprint)
root = lxml.etree.fromstring(crossref_xml)
# header
@@ -139,7 +140,7 @@ def test_metadata_for_deleted_node(self, crossref_client, preprint):
preprint.is_public = False
preprint.save()
- crossref_xml = crossref_client.build_metadata(preprint, status='unavailable')
+ crossref_xml = crossref_client.build_metadata(preprint)
root = lxml.etree.fromstring(crossref_xml)
# body
@@ -217,7 +218,7 @@ def test_metadata_for_single_name_contributor_only_has_surname(self, crossref_cl
contributor.family_name = ''
contributor.save()
- crossref_xml = crossref_client.build_metadata(preprint, pretty_print=True)
+ crossref_xml = crossref_client.build_metadata(preprint)
root = lxml.etree.fromstring(crossref_xml)
contributors = root.find('.//{%s}contributors' % crossref.CROSSREF_NAMESPACE)
@@ -236,7 +237,7 @@ def test_metadata_contributor_orcid(self, crossref_client, preprint):
}
contributor.save()
- crossref_xml = crossref_client.build_metadata(preprint, pretty_print=True)
+ crossref_xml = crossref_client.build_metadata(preprint)
root = lxml.etree.fromstring(crossref_xml)
contributors = root.find('.//{%s}contributors' % crossref.CROSSREF_NAMESPACE)
@@ -250,14 +251,15 @@ def test_metadata_contributor_orcid(self, crossref_client, preprint):
}
contributor.save()
- crossref_xml = crossref_client.build_metadata(preprint, pretty_print=True)
+ crossref_xml = crossref_client.build_metadata(preprint)
root = lxml.etree.fromstring(crossref_xml)
contributors = root.find('.//{%s}contributors' % crossref.CROSSREF_NAMESPACE)
+ # Do not send unverified ORCID to crossref
assert contributors.find('.//{%s}ORCID' % crossref.CROSSREF_NAMESPACE) is None
def test_metadata_none_license_update(self, crossref_client, preprint):
- crossref_xml = crossref_client.build_metadata(preprint, pretty_print=True)
+ crossref_xml = crossref_client.build_metadata(preprint)
root = lxml.etree.fromstring(crossref_xml)
assert root.find('.//{%s}license_ref' % crossref.CROSSREF_ACCESS_INDICATORS).text == 'https://creativecommons.org/licenses/by/4.0/legalcode'
@@ -271,7 +273,7 @@ def test_metadata_none_license_update(self, crossref_client, preprint):
preprint.set_preprint_license(license_detail, Auth(preprint.creator), save=True)
- crossref_xml = crossref_client.build_metadata(preprint, pretty_print=True)
+ crossref_xml = crossref_client.build_metadata(preprint)
root = lxml.etree.fromstring(crossref_xml)
assert root.find('.//{%s}license_ref' % crossref.CROSSREF_ACCESS_INDICATORS) is None
@@ -285,3 +287,16 @@ def test_metadata_for_non_included_relation(self, crossref_client, preprint):
xml_without_relation = crossref_client.build_metadata(preprint, include_relation=False)
root_without_relation = lxml.etree.fromstring(xml_without_relation)
assert root_without_relation.find('.//{%s}intra_work_relation' % crossref.CROSSREF_RELATIONS) is None
+
+ def test_metadata_for_affiliated_institutions(self, crossref_client, preprint):
+ institution = InstitutionFactory()
+ institution.ror_uri = 'http://ror.org/WHATisITgoodFOR/'
+ institution.save()
+ preprint.creator.affiliated_institutions.add(institution)
+ preprint.creator.save()
+
+ crossref_xml = crossref_client.build_metadata(preprint)
+ root = lxml.etree.fromstring(crossref_xml)
+ contributors = root.find('.//{%s}contributors' % crossref.CROSSREF_NAMESPACE)
+ assert contributors.find('.//{%s}institution_name' % crossref.CROSSREF_NAMESPACE).text == institution.name
+ assert contributors.find('.//{%s}institution_id' % crossref.CROSSREF_NAMESPACE).text == institution.ror_uri
\ No newline at end of file
diff --git a/tests/identifiers/test_datacite.py b/tests/identifiers/test_datacite.py
index 7f046f7ed28..6c09997e10c 100644
--- a/tests/identifiers/test_datacite.py
+++ b/tests/identifiers/test_datacite.py
@@ -21,7 +21,7 @@
class TestDataCiteClient:
@pytest.fixture()
- def datacite_client(self, registration):
+ def datacite_client(self, registration, mock_datacite):
return registration.get_doi_client()
@pytest.fixture()
@@ -70,7 +70,7 @@ def test_datacite_build_metadata(self, registration, datacite_client):
parser = lxml.etree.XMLParser(ns_clean=True, recover=True, encoding='utf-8')
root = lxml.etree.fromstring(metadata_xml, parser=parser)
xsi_location = '{http://www.w3.org/2001/XMLSchema-instance}schemaLocation'
- expected_location = 'http://datacite.org/schema/kernel-4 http://schema.datacite.org/meta/kernel-4/metadata.xsd'
+ expected_location = 'http://datacite.org/schema/kernel-4 http://schema.datacite.org/meta/kernel-4.3/metadata.xsd'
assert root.attrib[xsi_location] == expected_location
identifier = root.find('{%s}identifier' % schema40.ns[None])
@@ -90,7 +90,7 @@ def test_datacite_build_metadata(self, registration, datacite_client):
assert resource_type.text == 'Pre-registration'
assert resource_type.attrib['resourceTypeGeneral'] == 'Text'
- def test_metadata_for_node_only_includes_visible_contribs(self, datacite_client):
+ def test_datacite_format_contributors(self, datacite_client):
visible_contrib = AuthUserFactory()
visible_contrib2 = AuthUserFactory()
visible_contrib2.given_name = u'ヽ༼ ಠ益ಠ ༽ノ'
@@ -109,12 +109,12 @@ def test_metadata_for_node_only_includes_visible_contribs(self, datacite_client)
metadata_xml = datacite_client.build_metadata(registration)
# includes visible contrib name
- assert u'{}'.format(visible_contrib.given_name) in metadata_xml
- assert u'{}'.format(visible_contrib.family_name) in metadata_xml
+ assert f'<contributorName nameType="Personal">{visible_contrib.fullname}</contributorName>' not in metadata_xml
+ assert f'<creatorName nameType="Personal">{visible_contrib.fullname}</creatorName>' in metadata_xml
+
+ assert f'<contributorName nameType="Personal">{invisible_contrib.fullname}</contributorName>' in metadata_xml
+ assert f'<creatorName nameType="Personal">{invisible_contrib.fullname}</creatorName>' not in metadata_xml
- # doesn't include invisible contrib name
- assert u'{}'.format(invisible_contrib.given_name) not in metadata_xml
- assert u'{}'.format(invisible_contrib.family_name) not in metadata_xml
@pytest.mark.django_db
diff --git a/tests/test_addons.py b/tests/test_addons.py
index 6406dc7a8cd..c67d01e75d6 100644
--- a/tests/test_addons.py
+++ b/tests/test_addons.py
@@ -17,8 +17,12 @@
from nose.tools import * # noqa
from tests.base import OsfTestCase, get_default_metaschema
from api_tests.utils import create_test_file
-from osf_tests.factories import (AuthUserFactory, ProjectFactory,
- RegistrationFactory, DraftRegistrationFactory,)
+from osf_tests.factories import (
+ AuthUserFactory,
+ ProjectFactory,
+ RegistrationFactory,
+ DraftRegistrationFactory,
+)
from website import settings
from addons.base import views
from addons.github.exceptions import ApiError
@@ -26,12 +30,8 @@
from addons.github.tests.factories import GitHubAccountFactory
from addons.osfstorage.models import OsfStorageFileNode, OsfStorageFolder, OsfStorageFile
from addons.osfstorage.tests.factories import FileVersionFactory
-<<<<<<< HEAD
-from osf.models import Session
-=======
from osf import features
-from osf.models import Session, QuickFilesNode
->>>>>>> 313e31f680f8b92fc9355a902bfc99773d64bc89
+from osf.models import Session
from osf.models import files as file_models
from osf.models.files import BaseFileNode, TrashedFileNode
from osf.utils.permissions import WRITE, READ
@@ -48,16 +48,6 @@
from waffle.testutils import override_flag
-class SetEnvironMiddleware(object):
-
- def __init__(self, app, **kwargs):
- self.app = app
- self.kwargs = kwargs
-
- def __call__(self, environ, start_response):
- environ.update(self.kwargs)
- return self.app(environ, start_response)
-
class TestAddonAuth(OsfTestCase):
@@ -167,7 +157,7 @@ def test_auth_bad_bearer_token(self, mock_cas_client):
res = self.app.get(url, headers={'Authorization': 'Bearer invalid_access_token'}, expect_errors=True)
assert_equal(res.status_code, 403)
- def test_action_downloads_marks_version_as_seen(self):
+ def test_action_render_marks_version_as_seen(self):
noncontrib = AuthUserFactory()
node = ProjectFactory(is_public=True)
test_file = create_test_file(node, self.user)
diff --git a/tests/test_institutions/__init__.py b/tests/test_institutions/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/test_institutions/education_affiliations.xml b/tests/test_institutions/education_affiliations.xml
new file mode 100644
index 00000000000..042a87b5bb7
--- /dev/null
+++ b/tests/test_institutions/education_affiliations.xml
@@ -0,0 +1,80 @@
+
+
+ 2021-04-30T01:57:42.246Z
+
+ 2021-04-30T01:57:42.246Z
+
+
+ 2021-04-30T01:57:42.246Z
+ 2021-04-30T01:57:42.246Z
+
+
+ https://orcid.org/client/6666-7777-8888-9999
+ 6666-7777-8888-9999
+ orcid.org
+
+ ORCID Integration at a Verified Institution
+
+ A Verified Department
+ A Verified Title
+
+ 2007
+ 09
+ 01
+
+
+ 2011
+ 6
+ 30
+
+
+ A Verified Institution
+
+ A Verified City
+ A Verified State
+ A Verified Country
+
+
+ 1234567890
+ RINGGOLD
+
+
+
+
+
+ 2021-04-30T01:57:42.246Z
+
+
+ 2021-04-30T01:57:42.246Z
+ 2021-04-30T01:57:42.246Z
+
+
+ https://orcid.org/1111-2222-3333-4444
+ 1111-2222-3333-4444
+ orcid.org
+
+ An ORCiD User
+
+ A Department
+ A Title
+
+ 2011
+ 09
+ 01
+
+
+ 2013
+ 6
+ 30
+
+
+ An Institution
+
+ A City
+ A State
+ A Country
+
+
+
+
+
diff --git a/tests/test_institutions/employment_affiliations.xml b/tests/test_institutions/employment_affiliations.xml
new file mode 100644
index 00000000000..05ac0a5117f
--- /dev/null
+++ b/tests/test_institutions/employment_affiliations.xml
@@ -0,0 +1,63 @@
+
+
+ 2015-04-08T11:12:05.195Z
+
+ 2015-04-08T11:12:05.195Z
+
+
+ 2015-04-08T11:12:05.195Z
+ 2015-04-08T11:12:05.195Z
+
+
+ https://orcid.org/client/6666-7777-8888-9999
+ 6666-7777-8888-9999
+ orcid.org
+
+ ORCID Integration at a Verified Institution
+
+ A Verified Department
+
+ A Verified Institution
+
+ A Verified City
+ A Verified State
+ A Verified Country
+
+
+ 1234567890
+ RINGGOLD
+
+
+
+
+
+ 2021-04-30T01:57:49.345Z
+
+
+ 2021-04-30T01:54:30.883Z
+ 2021-04-30T01:57:49.345Z
+
+
+ https://orcid.org/1111-2222-3333-4444
+ 1111-2222-3333-4444
+ orcid.org
+
+ An ORCiD User
+
+ A Department
+
+ 2015
+ 11
+ 16
+
+
+ An Institution
+
+ A City
+ A State
+ A Country
+
+
+
+
+
diff --git a/tests/test_institutions/test_affiliation_via_orcid.py b/tests/test_institutions/test_affiliation_via_orcid.py
new file mode 100644
index 00000000000..71189a3bbcd
--- /dev/null
+++ b/tests/test_institutions/test_affiliation_via_orcid.py
@@ -0,0 +1,348 @@
+import mock
+import os
+import pytest
+
+from lxml import etree
+from requests.models import Response
+
+from framework.auth import tasks
+from osf.models.institution import IntegrationType
+from osf_tests.factories import UserFactory, InstitutionFactory
+from tests.base import fake
+from website.settings import ORCID_RECORD_EDUCATION_PATH, ORCID_RECORD_EMPLOYMENT_PATH
+
+
+@pytest.mark.django_db
+class TestInstitutionAffiliationViaOrcidSso:
+
+ @pytest.fixture()
+ def response_content_educations(self):
+ with open(os.path.join(os.path.dirname(__file__), 'education_affiliations.xml'), 'rb') as fp:
+ return fp.read()
+
+ @pytest.fixture()
+ def response_content_employments(self):
+ with open(os.path.join(os.path.dirname(__file__), 'employment_affiliations.xml'), 'rb') as fp:
+ return fp.read()
+
+ @pytest.fixture()
+ def xml_data_educations(self, response_content_educations):
+ return etree.XML(response_content_educations)
+
+ @pytest.fixture()
+ def xml_data_employments(self, response_content_employments):
+ return etree.XML(response_content_employments)
+
+ @pytest.fixture()
+ def orcid_id_verified(self):
+ return '1111-2222-3333-4444'
+
+ @pytest.fixture()
+ def orcid_id_link(self):
+ return fake.ean()
+
+ @pytest.fixture()
+ def orcid_id_create(self):
+ return fake.ean()
+
+ @pytest.fixture()
+ def orcid_id_random(self):
+ return fake.ean()
+
+ @pytest.fixture()
+ def user_with_orcid_id_verified(self, orcid_id_verified):
+ return UserFactory(external_identity={'ORCID': {orcid_id_verified: 'VERIFIED'}})
+
+ @pytest.fixture()
+ def user_with_orcid_id_link(self, orcid_id_link):
+ return UserFactory(external_identity={'ORCID': {orcid_id_link: 'LINK'}})
+
+ @pytest.fixture()
+ def user_with_orcid_id_create(self, orcid_id_create):
+ return UserFactory(external_identity={'ORCID': {orcid_id_create: 'CREATE'}})
+
+ @pytest.fixture()
+ def user_without_orcid_id(self):
+ return UserFactory()
+
+ @pytest.fixture()
+ def eligible_institution(self):
+ institution = InstitutionFactory()
+ institution.delegation_protocol = IntegrationType.AFFILIATION_VIA_ORCID.value
+ institution.orcid_record_verified_source = 'ORCID Integration at a Verified Institution'
+ institution.save()
+ return institution
+
+ @pytest.fixture()
+ def another_eligible_institution(self):
+ institution = InstitutionFactory()
+ institution.delegation_protocol = IntegrationType.AFFILIATION_VIA_ORCID.value
+ institution.orcid_record_verified_source = 'ORCID Integration for another Verified Institution'
+ institution.save()
+ return institution
+
+ @pytest.fixture()
+ def user_verified_and_affiliated(self, orcid_id_verified, eligible_institution):
+ user = UserFactory(external_identity={'ORCID': {orcid_id_verified: 'VERIFIED'}})
+ user.affiliated_institutions.add(eligible_institution)
+ return user
+
+ @mock.patch('framework.auth.tasks.check_institution_affiliation')
+ @mock.patch('framework.auth.tasks.verify_user_orcid_id')
+ def test_update_affiliation_for_orcid_sso_users_new_affiliation(
+ self,
+ mock_verify_user_orcid_id,
+ mock_check_institution_affiliation,
+ user_with_orcid_id_verified,
+ orcid_id_verified,
+ eligible_institution,
+ ):
+ mock_verify_user_orcid_id.return_value = True
+ mock_check_institution_affiliation.return_value = eligible_institution
+ assert eligible_institution not in user_with_orcid_id_verified.affiliated_institutions.all()
+ tasks.update_affiliation_for_orcid_sso_users(user_with_orcid_id_verified._id, orcid_id_verified)
+ assert eligible_institution in user_with_orcid_id_verified.affiliated_institutions.all()
+
+ @mock.patch('framework.auth.tasks.check_institution_affiliation')
+ @mock.patch('framework.auth.tasks.verify_user_orcid_id')
+ def test_update_affiliation_for_orcid_sso_users_existing_affiliation(
+ self,
+ mock_verify_user_orcid_id,
+ mock_check_institution_affiliation,
+ user_verified_and_affiliated,
+ orcid_id_verified,
+ eligible_institution,
+ ):
+ mock_verify_user_orcid_id.return_value = True
+ mock_check_institution_affiliation.return_value = eligible_institution
+ assert eligible_institution in user_verified_and_affiliated.affiliated_institutions.all()
+ tasks.update_affiliation_for_orcid_sso_users(user_verified_and_affiliated._id, orcid_id_verified)
+ assert eligible_institution in user_verified_and_affiliated.affiliated_institutions.all()
+
+ @mock.patch('framework.auth.tasks.check_institution_affiliation')
+ @mock.patch('framework.auth.tasks.verify_user_orcid_id')
+ def test_update_affiliation_for_orcid_sso_users_verification_failed(
+ self,
+ mock_verify_user_orcid_id,
+ mock_check_institution_affiliation,
+ user_with_orcid_id_link,
+ orcid_id_link,
+ eligible_institution,
+ ):
+ mock_verify_user_orcid_id.return_value = False
+ tasks.update_affiliation_for_orcid_sso_users(user_with_orcid_id_link._id, orcid_id_link)
+ mock_check_institution_affiliation.assert_not_called()
+ assert eligible_institution not in user_with_orcid_id_link.affiliated_institutions.all()
+
+ @mock.patch('framework.auth.tasks.check_institution_affiliation')
+ @mock.patch('framework.auth.tasks.verify_user_orcid_id')
+ def test_update_affiliation_for_orcid_sso_users_institution_not_found(
+ self,
+ mock_verify_user_orcid_id,
+ mock_check_institution_affiliation,
+ user_with_orcid_id_verified,
+ orcid_id_verified,
+ eligible_institution,
+ ):
+ mock_verify_user_orcid_id.return_value = True
+ mock_check_institution_affiliation.return_value = None
+ assert eligible_institution not in user_with_orcid_id_verified.affiliated_institutions.all()
+ tasks.update_affiliation_for_orcid_sso_users(user_with_orcid_id_verified._id, orcid_id_verified)
+ assert eligible_institution not in user_with_orcid_id_verified.affiliated_institutions.all()
+
+ def test_verify_user_orcid_id_verified(self, user_with_orcid_id_verified, orcid_id_verified):
+ assert tasks.verify_user_orcid_id(user_with_orcid_id_verified, orcid_id_verified)
+
+ def test_verify_user_orcid_id_link(self, user_with_orcid_id_link, orcid_id_link):
+ assert not tasks.verify_user_orcid_id(user_with_orcid_id_link, orcid_id_link)
+
+ def test_verify_user_orcid_id_create(self, user_with_orcid_id_create, orcid_id_create):
+ assert not tasks.verify_user_orcid_id(user_with_orcid_id_create, orcid_id_create)
+
+ def test_verify_user_orcid_id_none(self, user_without_orcid_id, orcid_id_random):
+ assert not tasks.verify_user_orcid_id(user_without_orcid_id, orcid_id_random)
+
+ @mock.patch('framework.auth.tasks.get_orcid_employment_sources')
+ @mock.patch('framework.auth.tasks.get_orcid_education_sources')
+ def test_check_institution_affiliation_from_employment_sources(
+ self,
+ mock_get_orcid_education_sources,
+ mock_get_orcid_employment_sources,
+ user_with_orcid_id_verified,
+ orcid_id_verified,
+ eligible_institution,
+ ):
+ mock_get_orcid_employment_sources.return_value = [
+ eligible_institution.orcid_record_verified_source,
+ user_with_orcid_id_verified.fullname,
+ ]
+ mock_get_orcid_education_sources.return_value = [user_with_orcid_id_verified.username, ]
+ institution = tasks.check_institution_affiliation(orcid_id_verified)
+ assert institution == eligible_institution
+
+ @mock.patch('framework.auth.tasks.get_orcid_employment_sources')
+ @mock.patch('framework.auth.tasks.get_orcid_education_sources')
+ def test_check_institution_affiliation_from_education_sources(
+ self,
+ mock_get_orcid_education_sources,
+ mock_get_orcid_employment_sources,
+ user_with_orcid_id_verified,
+ orcid_id_verified,
+ eligible_institution,
+ ):
+ mock_get_orcid_employment_sources.return_value = [user_with_orcid_id_verified.fullname, ]
+ mock_get_orcid_education_sources.return_value = [
+ eligible_institution.orcid_record_verified_source,
+ user_with_orcid_id_verified.username,
+ ]
+ institution = tasks.check_institution_affiliation(orcid_id_verified)
+ assert institution == eligible_institution
+
+ @mock.patch('framework.auth.tasks.get_orcid_employment_sources')
+ @mock.patch('framework.auth.tasks.get_orcid_education_sources')
+ def test_check_institution_affiliation_no_result(
+ self,
+ mock_get_orcid_education_sources,
+ mock_get_orcid_employment_sources,
+ user_with_orcid_id_verified,
+ orcid_id_verified,
+ eligible_institution,
+ ):
+ mock_get_orcid_employment_sources.return_value = [user_with_orcid_id_verified.fullname, ]
+ mock_get_orcid_education_sources.return_value = [user_with_orcid_id_verified.username, ]
+ institution = tasks.check_institution_affiliation(orcid_id_verified)
+ assert institution is None
+
+ @mock.patch('framework.auth.tasks.get_orcid_employment_sources')
+ @mock.patch('framework.auth.tasks.get_orcid_education_sources')
+ def test_check_institution_affiliation_multiple_results_case_1(
+ self,
+ mock_get_orcid_education_sources,
+ mock_get_orcid_employment_sources,
+ user_with_orcid_id_verified,
+ orcid_id_verified,
+ eligible_institution,
+ another_eligible_institution,
+ ):
+ mock_get_orcid_employment_sources.return_value = [
+ another_eligible_institution.orcid_record_verified_source,
+ user_with_orcid_id_verified.fullname,
+ ]
+ mock_get_orcid_education_sources.return_value = [
+ eligible_institution.orcid_record_verified_source,
+ user_with_orcid_id_verified.username,
+ ]
+ institution = tasks.check_institution_affiliation(orcid_id_verified)
+ assert institution == another_eligible_institution
+
+ @mock.patch('framework.auth.tasks.get_orcid_employment_sources')
+ @mock.patch('framework.auth.tasks.get_orcid_education_sources')
+ def test_check_institution_affiliation_multiple_results_case_2(
+ self,
+ mock_get_orcid_education_sources,
+ mock_get_orcid_employment_sources,
+ user_with_orcid_id_verified,
+ orcid_id_verified,
+ eligible_institution,
+ another_eligible_institution,
+ ):
+ mock_get_orcid_employment_sources.return_value = [
+ eligible_institution.orcid_record_verified_source,
+ user_with_orcid_id_verified.fullname,
+ ]
+ mock_get_orcid_education_sources.return_value = [
+ another_eligible_institution.orcid_record_verified_source,
+ user_with_orcid_id_verified.username,
+ ]
+ institution = tasks.check_institution_affiliation(orcid_id_verified)
+ assert institution == eligible_institution
+
+ @mock.patch('framework.auth.tasks.orcid_public_api_make_request')
+ def test_get_orcid_employment_sources(
+ self,
+ mock_orcid_public_api_make_request,
+ orcid_id_verified,
+ eligible_institution,
+ xml_data_employments,
+ ):
+ mock_orcid_public_api_make_request.return_value = xml_data_employments
+ source_list = tasks.get_orcid_employment_sources(orcid_id_verified)
+ assert len(source_list) == 2
+ assert eligible_institution.orcid_record_verified_source in source_list
+ assert 'An ORCiD User' in source_list
+
+ @mock.patch('framework.auth.tasks.orcid_public_api_make_request')
+ def test_get_orcid_education_sources(
+ self,
+ mock_orcid_public_api_make_request,
+ orcid_id_verified,
+ eligible_institution,
+ xml_data_educations,
+ ):
+ mock_orcid_public_api_make_request.return_value = xml_data_educations
+ source_list = tasks.get_orcid_education_sources(orcid_id_verified)
+ assert len(source_list) == 2
+ assert eligible_institution.orcid_record_verified_source in source_list
+ assert 'An ORCiD User' in source_list
+
+ @mock.patch('requests.get')
+ def test_orcid_public_api_make_request_education_path(
+ self,
+ mock_get,
+ orcid_id_verified,
+ response_content_educations
+ ):
+ mock_response = Response()
+ mock_response.status_code = 200
+ mock_response._content = response_content_educations
+ mock_get.return_value = mock_response
+ xml_data = tasks.orcid_public_api_make_request(ORCID_RECORD_EDUCATION_PATH, orcid_id_verified)
+ assert xml_data is not None
+
+ @mock.patch('requests.get')
+ def test_orcid_public_api_make_request_employment_path(
+ self,
+ mock_get,
+ orcid_id_verified,
+ response_content_employments
+ ):
+ mock_response = Response()
+ mock_response.status_code = 200
+ mock_response._content = response_content_employments
+ mock_get.return_value = mock_response
+ xml_data = tasks.orcid_public_api_make_request(ORCID_RECORD_EMPLOYMENT_PATH, orcid_id_verified)
+ assert xml_data is not None
+
+ # For failure cases, exercising either the education or the employment path is sufficient,
+ # so the remaining tests below use the education path to avoid duplicate coverage.
+
+ @mock.patch('requests.get')
+ @mock.patch('lxml.etree.XML')
+ def test_orcid_public_api_make_request_not_200(
+ self,
+ mock_XML,
+ mock_get,
+ orcid_id_verified,
+ response_content_educations
+ ):
+ mock_response = Response()
+ mock_response.status_code = 204
+ mock_response._content = None
+ mock_get.return_value = mock_response
+ xml_data = tasks.orcid_public_api_make_request(ORCID_RECORD_EDUCATION_PATH, orcid_id_verified)
+ assert xml_data is None
+ mock_XML.assert_not_called()
+
+ @mock.patch('requests.get')
+ def test_orcid_public_api_make_request_parsing_error(
+ self,
+ mock_get,
+ orcid_id_verified,
+ response_content_educations
+ ):
+ mock_response = Response()
+ mock_response.status_code = 200
+ mock_response._content = b'invalid_xml'
+ mock_get.return_value = mock_response
+ xml_data = tasks.orcid_public_api_make_request(ORCID_RECORD_EDUCATION_PATH, orcid_id_verified)
+ assert xml_data is None
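The tests above pin down the contract for `orcid_public_api_make_request`: a 200 response is parsed with `lxml.etree.XML`, while non-200 responses and unparseable bodies yield `None` without raising. Below is a minimal sketch consistent with that contract, wired to the new `ORCID_PUBLIC_API_*` settings introduced later in this diff; the URL and header construction are assumptions, not necessarily the exact implementation in `framework/auth/tasks.py`:

import requests
from lxml import etree

from website import settings

def orcid_public_api_make_request(path, orcid_id):
    # e.g. https://pub.orcid.org/v3.0/<orcid_id>/educations (assumed URL shape)
    url = f'{settings.ORCID_PUBLIC_API_V3_URL}{orcid_id}{path}'
    headers = {
        'Accept': settings.ORCID_RECORD_ACCEPT_TYPE,
        'Authorization': f'Bearer {settings.ORCID_PUBLIC_API_ACCESS_TOKEN}',
    }
    response = requests.get(url, headers=headers, timeout=settings.ORCID_PUBLIC_API_REQUEST_TIMEOUT)
    if response.status_code != 200:
        # Mirrors test_orcid_public_api_make_request_not_200: no XML parsing is attempted.
        return None
    try:
        return etree.XML(response.content)
    except etree.XMLSyntaxError:
        # Mirrors test_orcid_public_api_make_request_parsing_error.
        return None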
diff --git a/tests/test_views.py b/tests/test_views.py
index cbb7668cbcd..d550f80493a 100644
--- a/tests/test_views.py
+++ b/tests/test_views.py
@@ -43,14 +43,9 @@
from framework.flask import redirect
from framework.transactions.handlers import no_auto_transaction
-from waffle.testutils import override_flag
-
from website import mailchimp_utils, mails, settings, language
-<<<<<<< HEAD
from addons.osfstorage import settings as osfstorage_settings
from osf.models import AbstractNode, NodeLog
-=======
->>>>>>> 313e31f680f8b92fc9355a902bfc99773d64bc89
from website.profile.utils import add_contributor_json, serialize_unregistered
from website.profile.views import update_osf_help_mails_subscription
from website.project.decorators import check_can_access
@@ -62,21 +57,16 @@
send_claim_email,
send_claim_registered_email,
)
-<<<<<<< HEAD
-=======
from website.settings import EXTERNAL_EMBER_APPS
->>>>>>> 313e31f680f8b92fc9355a902bfc99773d64bc89
from website.project.views.node import _should_show_wiki_widget, abbrev_authors
from website.util import api_url_for, web_url_for
from website.util import rubeus
from website.util.metrics import OsfSourceTags, OsfClaimedTags, provider_source_tag, provider_claimed_tag
from osf import features
from osf.utils import permissions
-<<<<<<< HEAD
from osf.models import Comment
from osf.models import OSFUser, Tag
from osf.models.spam import SpamStatus
-=======
from osf.models import (
Comment,
AbstractNode,
@@ -85,11 +75,9 @@
Tag,
SpamStatus,
NodeRelation,
- QuickFilesNode,
NotableEmailDomain
)
->>>>>>> 313e31f680f8b92fc9355a902bfc99773d64bc89
from tests.base import (
assert_is_redirect,
capture_signals,
@@ -104,10 +92,8 @@
from api_tests.utils import create_test_file
-<<<<<<< HEAD
from osf.models import NodeRelation, NotableEmailDomain
-=======
->>>>>>> 313e31f680f8b92fc9355a902bfc99773d64bc89
+
from osf_tests.factories import (
fake_email,
ApiOAuth2ApplicationFactory,
@@ -4778,7 +4764,7 @@ def test_favicon(self):
def test_getting_started_page(self):
res = self.app.get('/getting-started/')
assert_equal(res.status_code, 302)
- assert_equal(res.location, 'https://openscience.zendesk.com/hc/en-us')
+ assert_equal(res.location, 'https://help.osf.io/article/342-getting-started-on-the-osf')
def test_help_redirect(self):
res = self.app.get('/help/')
assert_equal(res.status_code,302)
diff --git a/website/identifiers/clients/crossref.py b/website/identifiers/clients/crossref.py
index 738baee4280..fb332fb36bd 100644
--- a/website/identifiers/clients/crossref.py
+++ b/website/identifiers/clients/crossref.py
@@ -14,11 +14,11 @@
logger = logging.getLogger(__name__)
-CROSSREF_NAMESPACE = 'http://www.crossref.org/schema/4.4.1'
-CROSSREF_SCHEMA_LOCATION = 'http://www.crossref.org/schema/4.4.1 http://www.crossref.org/schemas/crossref4.4.1.xsd'
+CROSSREF_NAMESPACE = 'http://www.crossref.org/schema/5.3.1'
+CROSSREF_SCHEMA_LOCATION = 'http://www.crossref.org/schema/5.3.1 http://www.crossref.org/schemas/crossref5.3.1.xsd'
CROSSREF_ACCESS_INDICATORS = 'http://www.crossref.org/AccessIndicators.xsd'
CROSSREF_RELATIONS = 'http://www.crossref.org/relations.xsd'
-CROSSREF_SCHEMA_VERSION = '4.4.1'
+CROSSREF_SCHEMA_VERSION = '5.3.1'
JATS_NAMESPACE = 'http://www.ncbi.nlm.nih.gov/JATS1'
XSI = 'http://www.w3.org/2001/XMLSchema-instance'
CROSSREF_DEPOSITOR_NAME = 'Open Science Framework'
@@ -36,19 +36,15 @@ def get_credentials(self):
return (settings.CROSSREF_USERNAME, settings.CROSSREF_PASSWORD)
def build_doi(self, preprint):
- from osf.models import PreprintProvider
-
- prefix = preprint.provider.doi_prefix or PreprintProvider.objects.get(_id='osf').doi_prefix
+ prefix = preprint.provider.doi_prefix
return settings.DOI_FORMAT.format(prefix=prefix, guid=preprint._id)
- def build_metadata(self, preprint, status='public', include_relation=True, **kwargs):
+ def build_metadata(self, preprint, include_relation=True):
"""Return the crossref metadata XML document for a given preprint as a string for DOI minting purposes
:param preprint: the preprint, or list of preprints to build metadata for
"""
- is_batch = False
if isinstance(preprint, (list, QuerySet)):
- is_batch = True
preprints = preprint
else:
preprints = [preprint]
@@ -72,10 +68,9 @@ def build_metadata(self, preprint, status='public', include_relation=True, **kwa
element.registrant('Center for Open Science')
)
# if this is a batch update, let build_posted_content determine status for each preprint
- status = status if not is_batch else None
body = element.body()
for preprint in preprints:
- body.append(self.build_posted_content(preprint, element, status, include_relation))
+ body.append(self.build_posted_content(preprint, element, include_relation))
root = element.doi_batch(
head,
@@ -83,18 +78,18 @@ def build_metadata(self, preprint, status='public', include_relation=True, **kwa
version=CROSSREF_SCHEMA_VERSION
)
root.attrib['{%s}schemaLocation' % XSI] = CROSSREF_SCHEMA_LOCATION
- return lxml.etree.tostring(root, pretty_print=kwargs.get('pretty_print', True))
+ return lxml.etree.tostring(root)
- def build_posted_content(self, preprint, element, status, include_relation):
+ def build_posted_content(self, preprint, element, include_relation):
"""Build the element for a single preprint
preprint - preprint to build posted_content for
element - namespace element to use when building parts of the XML structure
"""
- status = status or self.get_status(preprint)
posted_content = element.posted_content(
element.group_title(preprint.provider.name),
type='preprint'
)
+ status = self.get_status(preprint)
if status == 'public':
posted_content.append(element.contributors(*self._crossref_format_contributors(element, preprint)))
@@ -123,7 +118,7 @@ def build_posted_content(self, preprint, element, status, include_relation):
element.program(xmlns=CROSSREF_ACCESS_INDICATORS)
)
- if preprint.article_doi and include_relation:
+ if preprint.article_doi and preprint.article_doi != self.build_doi(preprint) and include_relation:
posted_content.append(
element.program(
element.related_item(
@@ -190,13 +185,24 @@ def _crossref_format_contributors(self, element, preprint):
person.append(element.surname(name_parts['surname']))
if name_parts.get('suffix'):
person.append(element.suffix(remove_control_characters(name_parts['suffix'])))
- if contributor.external_identity.get('ORCID'):
- orcid = list(contributor.external_identity['ORCID'].keys())[0]
- verified = list(contributor.external_identity['ORCID'].values())[0] == 'VERIFIED'
- if orcid and verified:
- person.append(
- element.ORCID('https://orcid.org/{}'.format(orcid), authenticated='true')
- )
+ affiliations = [
+ element.institution(
+ element.institution_name(institution.name),
+ element.institution_id(
+ institution.ror_uri,
+ type='ror'
+ ),
+ ) for institution in contributor.affiliated_institutions.all() if institution.ror_uri
+ ]
+ if affiliations:
+ person.append(element.affiliations(*affiliations))
+
+ orcid = contributor.get_verified_external_id('ORCID', verified_only=True)
+ if orcid:
+ person.append(
+ element.ORCID(f'https://orcid.org/{orcid}', authenticated='true')
+ )
+
contributors.append(person)
return contributors
@@ -215,25 +221,21 @@ def _build_url(self, **query):
return url.url
def create_identifier(self, preprint, category, include_relation=True):
- status = self.get_status(preprint)
-
if category == 'doi':
- metadata = self.build_metadata(preprint, status, include_relation)
+ metadata = self.build_metadata(preprint, include_relation)
doi = self.build_doi(preprint)
- filename = doi.split('/')[-1]
username, password = self.get_credentials()
logger.info('Sending metadata for DOI {}:\n{}'.format(doi, metadata))
# Crossref sends an email to CROSSREF_DEPOSITOR_EMAIL to confirm
- requests.request(
- 'POST',
+ requests.post(
self._build_url(
operation='doMDUpload',
login_id=username,
login_passwd=password,
- fname='{}.xml'.format(filename)
+ fname=f'{preprint._id}.xml'
),
- files={'file': ('{}.xml'.format(filename), metadata)},
+ files={'file': (f'{preprint._id}.xml', metadata)},
)
# Don't wait for response to confirm doi because it arrives via email.
@@ -250,15 +252,14 @@ def get_status(self, preprint):
def bulk_create(self, metadata, filename):
# Crossref sends an email to CROSSREF_DEPOSITOR_EMAIL to confirm
username, password = self.get_credentials()
- requests.request(
- 'POST',
+ requests.post(
self._build_url(
operation='doMDUpload',
login_id=username,
login_passwd=password,
- fname='{}.xml'.format(filename)
+ fname=f'{filename}.xml'
),
- files={'file': ('{}.xml'.format(filename), metadata)},
+ files={'file': (f'{filename}.xml', metadata)},
)
logger.info('Sent a bulk update of metadata to CrossRef')
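For reference, the contributor affiliations added above serialize to Crossref 5.3.1's <affiliations>/<institution>/<institution_id type="ror"> structure. A standalone sketch of the fragment built per contributor; the plain ElementMaker and the institution name/ROR values are illustrative only, since the production code reuses the namespaced `element` factory passed into `_crossref_format_contributors`:

from lxml import etree
from lxml.builder import ElementMaker

E = ElementMaker()  # the real factory is bound to the Crossref 5.3.1 namespace

affiliations = E.affiliations(
    E.institution(
        E.institution_name('Center for Open Science'),             # institution.name
        E.institution_id('https://ror.org/05d5mza29', type='ror'),  # institution.ror_uri
    ),
)
print(etree.tostring(affiliations, pretty_print=True).decode())
# <affiliations>
#   <institution>
#     <institution_name>Center for Open Science</institution_name>
#     <institution_id type="ror">https://ror.org/05d5mza29</institution_id>
#   </institution>
# </affiliations>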
diff --git a/website/identifiers/clients/datacite.py b/website/identifiers/clients/datacite.py
index 7e3276d797b..fd13a6d771d 100644
--- a/website/identifiers/clients/datacite.py
+++ b/website/identifiers/clients/datacite.py
@@ -7,7 +7,7 @@
from website.identifiers.clients.base import AbstractIdentifierClient
from website import settings
-from datacite import DataCiteMDSClient, schema40
+from datacite import DataCiteMDSClient, schema43
from django.core.exceptions import ImproperlyConfigured
from osf.metadata.utils import datacite_format_subjects, datacite_format_contributors, datacite_format_creators
@@ -32,32 +32,64 @@ def __init__(self, node):
def build_metadata(self, node):
"""Return the formatted datacite metadata XML as a string.
"""
+ non_bib_contributors = node.contributors.filter(
+ contributor__visible=False,
+ contributor__node=node.id
+ )
+
+ contributors = datacite_format_contributors(non_bib_contributors)
+ contributors.append({
+ 'nameType': 'Organizational',
+ 'contributorType': 'HostingInstitution',
+ 'contributorName': 'Open Science Framework',
+ 'name': 'Open Science Framework',
+ 'nameIdentifiers': [
+ {
+ 'name': 'Open Science Framework',
+ 'nameIdentifier': f'https://ror.org/{settings.OSF_ROR_ID}/',
+ 'nameIdentifierScheme': 'ROR',
+ },
+ {
+ 'name': 'Open Science Framework',
+ 'nameIdentifier': f'https://grid.ac/institutes/{settings.OSF_GRID_ID}/',
+ 'nameIdentifierScheme': 'GRID',
+ }
+ ],
+ })
+ date_created = node.created.date() if node.type != 'osf.registration' else node.registered_date.date()
data = {
- 'identifier': {
- 'identifier': self.build_doi(node),
- 'identifierType': 'DOI',
- },
- 'creators': datacite_format_creators([node.creator]),
- 'contributors': datacite_format_contributors(node.visible_contributors),
+ 'identifiers': [
+ {
+ 'identifier': self.build_doi(node),
+ 'identifierType': 'DOI',
+ }
+ ],
+ 'creators': datacite_format_creators(node.visible_contributors),
+ 'contributors': contributors,
'titles': [
{'title': node.title}
],
'publisher': 'Open Science Framework',
'publicationYear': str(datetime.datetime.now().year),
- 'resourceType': {
+ 'types': {
'resourceType': 'Pre-registration' if node.type == 'osf.registration' else 'Project',
'resourceTypeGeneral': 'Text'
},
+ 'schemaVersion': 'http://datacite.org/schema/kernel-4',
'dates': [
{
- 'date': node.created.isoformat(),
+ 'date': str(date_created),
'dateType': 'Created'
},
{
- 'date': node.modified.isoformat(),
+ 'date': str(node.modified.date()),
'dateType': 'Updated'
},
+ {
+ 'date': str(datetime.datetime.now().date()),
+ 'dateType': 'Issued'
+ },
]
}
@@ -86,10 +118,10 @@ def build_metadata(self, node):
data['subjects'] = datacite_format_subjects(node)
# Validate dictionary
- assert schema40.validate(data)
+ assert schema43.validate(data)
# Generate DataCite XML from dictionary.
- return schema40.tostring(data)
+ return schema43.tostring(data)
def build_doi(self, object):
return settings.DOI_FORMAT.format(
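The move from schema40 to schema43 also changes the expected shape of the metadata dict: `identifiers` becomes a list, `types` replaces `resourceType`, and `schemaVersion` must name kernel-4. A minimal sketch of validating and serializing such a payload with the datacite package (the DOI and names below are made-up example values; the exact required fields are enforced by the package's bundled kernel-4.3 JSON schema):

import datetime
from datacite import schema43

data = {
    'identifiers': [
        {'identifier': '10.12345/osf.io/abcde', 'identifierType': 'DOI'},  # illustrative DOI
    ],
    'creators': [
        {'nameType': 'Personal', 'name': 'Example, Author',
         'givenName': 'Author', 'familyName': 'Example'},
    ],
    'titles': [{'title': 'Example project'}],
    'publisher': 'Open Science Framework',
    'publicationYear': str(datetime.datetime.now().year),
    'types': {'resourceType': 'Project', 'resourceTypeGeneral': 'Text'},
    'schemaVersion': 'http://datacite.org/schema/kernel-4',
    'dates': [{'date': str(datetime.datetime.now().date()), 'dateType': 'Issued'}],
}

assert schema43.validate(data)        # validates against the kernel-4.3 JSON schema
xml_string = schema43.tostring(data)  # DataCite XML, as build_metadata now returns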
diff --git a/website/language.py b/website/language.py
index f666856c31a..815d1e7641f 100644
--- a/website/language.py
+++ b/website/language.py
@@ -13,7 +13,7 @@
# (upon clicking primary email confirmation link)
WELCOME_MESSAGE = """
Welcome to the OSF!
-Visit our Guides to learn about creating a project, or get inspiration from popular public projects.
+Visit our Guides to learn about creating a project, or get inspiration from popular public projects.
"""
TERMS_OF_SERVICE = """
@@ -146,8 +146,8 @@
u'This {category} contains links to other projects. These links will be '
u'copied into your registration, but the projects that they link to will '
u'not be registered. If you wish to register the linked projects, they '
- u'must be registered separately. Learn more about links.'
+ u'must be registered separately. Learn more about links.'
)
BEFORE_FORK_HAS_POINTERS = (
diff --git a/website/project/decorators.py b/website/project/decorators.py
index 8f8b64a7a9e..056bf3c846e 100644
--- a/website/project/decorators.py
+++ b/website/project/decorators.py
@@ -465,7 +465,7 @@ def check_contributor_auth(node, auth, include_public, include_view_only_anon, i
redirect_url = check_key_expired(key=auth.private_key, node=node, url=request.url)
if request.headers.get('Content-Type') == 'application/json':
raise HTTPError(http_status.HTTP_401_UNAUTHORIZED)
- else:
+ elif user is None:
response = redirect(cas.get_login_url(redirect_url))
return response
diff --git a/website/project/metadata/global-flourishing-preregistration.json b/website/project/metadata/global-flourishing-preregistration.json
index 20141192174..4eb4f7d85a1 100644
--- a/website/project/metadata/global-flourishing-preregistration.json
+++ b/website/project/metadata/global-flourishing-preregistration.json
@@ -52,7 +52,7 @@
},
{
"required":true,
- "block_type":"long-text-input",
+ "block_type":"long-text-input"
},
{
"block_type":"question-label",
@@ -331,4 +331,4 @@
"display_text":"Except when not possible, I will post preprints to the appropriate or relevant preprint server (e.g., SocArXiv, medRxiv, biorxiv, and PsyArXiv)."
}
]
-}
\ No newline at end of file
+}
diff --git a/website/project/metadata/sample-schema.json b/website/project/metadata/sample-schema.json
index fa9bc258569..881c25af75d 100644
--- a/website/project/metadata/sample-schema.json
+++ b/website/project/metadata/sample-schema.json
@@ -83,7 +83,7 @@
"example_text":"This is an example to help the researcher fill out the registration. Consider adding examples, formats, or other helpful content."
},
{
- "block_type":"long-text-input",
+ "block_type":"long-text-input"
},
{
"block_type":"question-label",
@@ -108,4 +108,4 @@
"block_type":"file-input"
}
]
-}
\ No newline at end of file
+}
diff --git a/website/project/metadata/schemas.py b/website/project/metadata/schemas.py
index 431fab4b726..f9fc72c7e44 100644
--- a/website/project/metadata/schemas.py
+++ b/website/project/metadata/schemas.py
@@ -38,6 +38,9 @@
'other-studies-using-youth-data.json',
'character-lab-short-form-registration.json',
'character-lab-long-form-registration.json',
+ 'global-flourishing-preregistration.json',
+ 'global-flourishing.json',
+ 'sample-schema.json',
]
METASCHEMA_ORDERING = (
diff --git a/website/project/metadata/utils.py b/website/project/metadata/utils.py
index 0267f8e7e10..e094da63753 100644
--- a/website/project/metadata/utils.py
+++ b/website/project/metadata/utils.py
@@ -250,8 +250,8 @@ def get_options_jsonschema(options, required):
def base_metaschema(metaschema):
json_schema = {
'type': 'object',
- 'description': metaschema['description'],
- 'title': metaschema['title'],
+ 'description': metaschema.get('description'),
+ 'title': metaschema.get('title'),
'additionalProperties': False,
'properties': {
}
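With dict.get, a schema block that omits description or title now contributes None instead of raising KeyError during schema generation. A tiny illustration:

metaschema = {'title': 'Sample schema'}  # hypothetical block with no 'description'

# Before: metaschema['description'] raised KeyError and aborted base_metaschema().
# After:
assert metaschema.get('description') is None
assert metaschema.get('title') == 'Sample schema'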
diff --git a/website/search/elastic_search.py b/website/search/elastic_search.py
index 77ba35a51d8..286a4ac5abb 100644
--- a/website/search/elastic_search.py
+++ b/website/search/elastic_search.py
@@ -668,21 +668,6 @@ def update_user(user, index=None):
if not user.is_active:
try:
client().delete(index=index, doc_type='user', id=user._id, refresh=True, ignore=[404])
-<<<<<<< HEAD
-=======
- # update files in their quickfiles node if the user has been marked as spam
- if user.spam_status == SpamStatus.SPAM:
- quickfiles = QuickFilesNode.objects.get_for_user(user)
- if quickfiles:
- for quickfile_id in quickfiles.files.values_list('_id', flat=True):
- client().delete(
- index=index,
- doc_type='file',
- id=quickfile_id,
- refresh=True,
- ignore=[404]
- )
->>>>>>> 313e31f680f8b92fc9355a902bfc99773d64bc89
except NotFoundError:
pass
return
diff --git a/website/settings/defaults.py b/website/settings/defaults.py
index 3607f2b6db8..38151271f17 100644
--- a/website/settings/defaults.py
+++ b/website/settings/defaults.py
@@ -341,6 +341,10 @@ def parent_dir(path):
ECSARXIV_CROSSREF_USERNAME = None
ECSARXIV_CROSSREF_PASSWORD = None
+# ror
+OSF_ROR_ID = '05d5mza29'
+OSF_GRID_ID = 'grid.466501.0'
+
# if our DOIs cannot be confirmed after X amount of days email the admin
DAYS_CROSSREF_DOIS_MUST_BE_STUCK_BEFORE_EMAIL = 2
@@ -738,6 +742,13 @@ class CeleryConfig:
'OrcidProfile': 'ORCID',
}
+ORCID_PUBLIC_API_ACCESS_TOKEN = None
+ORCID_PUBLIC_API_V3_URL = 'https://pub.orcid.org/v3.0/'
+ORCID_PUBLIC_API_REQUEST_TIMEOUT = None
+ORCID_RECORD_ACCEPT_TYPE = 'application/vnd.orcid+xml'
+ORCID_RECORD_EMPLOYMENT_PATH = '/employments'
+ORCID_RECORD_EDUCATION_PATH = '/educations'
+
# Source: https://github.com/maxd/fake_email_validator/blob/master/config/fake_domains.list
BLACKLISTED_DOMAINS = [
'0-mail.com',
diff --git a/website/static/img/institutions/banners/eur-banner.png b/website/static/img/institutions/banners/eur-banner.png
new file mode 100644
index 00000000000..8de5c979573
Binary files /dev/null and b/website/static/img/institutions/banners/eur-banner.png differ
diff --git a/website/static/img/institutions/banners/nationalmaglab-banner.png b/website/static/img/institutions/banners/nationalmaglab-banner.png
new file mode 100644
index 00000000000..3b2ea21d906
Binary files /dev/null and b/website/static/img/institutions/banners/nationalmaglab-banner.png differ
diff --git a/website/static/img/institutions/banners/oxford-banner.png b/website/static/img/institutions/banners/oxford-banner.png
new file mode 100644
index 00000000000..fe484b02c14
Binary files /dev/null and b/website/static/img/institutions/banners/oxford-banner.png differ
diff --git a/website/static/img/institutions/banners/purdue-banner.png b/website/static/img/institutions/banners/purdue-banner.png
new file mode 100644
index 00000000000..86dfa88b798
Binary files /dev/null and b/website/static/img/institutions/banners/purdue-banner.png differ
diff --git a/website/static/img/institutions/shields-rounded-corners/colorado-shield-rounded-corners.png b/website/static/img/institutions/shields-rounded-corners/colorado-shield-rounded-corners.png
index eb66ba69492..0b8e0904b88 100644
Binary files a/website/static/img/institutions/shields-rounded-corners/colorado-shield-rounded-corners.png and b/website/static/img/institutions/shields-rounded-corners/colorado-shield-rounded-corners.png differ
diff --git a/website/static/img/institutions/shields-rounded-corners/eur-shield-rounded-corners.png b/website/static/img/institutions/shields-rounded-corners/eur-shield-rounded-corners.png
new file mode 100644
index 00000000000..835839580a2
Binary files /dev/null and b/website/static/img/institutions/shields-rounded-corners/eur-shield-rounded-corners.png differ
diff --git a/website/static/img/institutions/shields-rounded-corners/nationalmaglab-shield-rounded-corners.png b/website/static/img/institutions/shields-rounded-corners/nationalmaglab-shield-rounded-corners.png
new file mode 100644
index 00000000000..e841eded429
Binary files /dev/null and b/website/static/img/institutions/shields-rounded-corners/nationalmaglab-shield-rounded-corners.png differ
diff --git a/website/static/img/institutions/shields-rounded-corners/oxford-shield-rounded-corners.png b/website/static/img/institutions/shields-rounded-corners/oxford-shield-rounded-corners.png
new file mode 100644
index 00000000000..2e271ab4ec8
Binary files /dev/null and b/website/static/img/institutions/shields-rounded-corners/oxford-shield-rounded-corners.png differ
diff --git a/website/static/img/institutions/shields-rounded-corners/purdue-shield-rounded-corners.png b/website/static/img/institutions/shields-rounded-corners/purdue-shield-rounded-corners.png
new file mode 100644
index 00000000000..125c22e11e4
Binary files /dev/null and b/website/static/img/institutions/shields-rounded-corners/purdue-shield-rounded-corners.png differ
diff --git a/website/static/img/institutions/shields/colorado-shield.png b/website/static/img/institutions/shields/colorado-shield.png
index 81cfbee684d..8ce1b8edf31 100644
Binary files a/website/static/img/institutions/shields/colorado-shield.png and b/website/static/img/institutions/shields/colorado-shield.png differ
diff --git a/website/static/img/institutions/shields/eur-shield.png b/website/static/img/institutions/shields/eur-shield.png
new file mode 100644
index 00000000000..302da7a6bdb
Binary files /dev/null and b/website/static/img/institutions/shields/eur-shield.png differ
diff --git a/website/static/img/institutions/shields/nationalmaglab-shield.png b/website/static/img/institutions/shields/nationalmaglab-shield.png
new file mode 100644
index 00000000000..c0a2e350ee4
Binary files /dev/null and b/website/static/img/institutions/shields/nationalmaglab-shield.png differ
diff --git a/website/static/img/institutions/shields/oxford-shield.png b/website/static/img/institutions/shields/oxford-shield.png
new file mode 100644
index 00000000000..0a6ad91d365
Binary files /dev/null and b/website/static/img/institutions/shields/oxford-shield.png differ
diff --git a/website/static/img/institutions/shields/purdue-shield.png b/website/static/img/institutions/shields/purdue-shield.png
new file mode 100644
index 00000000000..45bd4921956
Binary files /dev/null and b/website/static/img/institutions/shields/purdue-shield.png differ
diff --git a/website/static/js/addProjectPlugin.js b/website/static/js/addProjectPlugin.js
index 0a1c9b26da8..d67e8d9df32 100644
--- a/website/static/js/addProjectPlugin.js
+++ b/website/static/js/addProjectPlugin.js
@@ -269,7 +269,7 @@ var AddProject = {
m('i', ' This component will inherit the same license as ',
m('b', options.parentTitle),
'. ',
- m('a[target="_blank"][href="https://openscience.zendesk.com/hc/en-us/articles/360019737854"]', 'Learn more.' )
+ m('a[target="_blank"][href="https://help.osf.io/article/288-license-your-project"]', 'Learn more.' )
)
)
]): '',
diff --git a/website/static/js/anonymousLogActionsList.json b/website/static/js/anonymousLogActionsList.json
index d7ffee2ec95..4f222c86725 100644
--- a/website/static/js/anonymousLogActionsList.json
+++ b/website/static/js/anonymousLogActionsList.json
@@ -90,5 +90,9 @@
"preprint_license_updated": "A user updated the license of a preprint",
"subjects_updated": "A user updated the subjects",
"view_only_link_added": "A user created a view-only link to a project",
- "view_only_link_removed": "A user removed a view-only link to a project"
+ "view_only_link_removed": "A user removed a view-only link to a project",
+ "migrated_quickfiles": "QuickFiles were migrated into a public project",
+ "resource_identifier_added": "A Resource has been added to the Node",
+ "resource_identifier_removed": "A Resource has been removed from the Node",
+ "resource_identifier_updated": "A Resource on the Node has had its PID updated"
}
diff --git a/website/static/js/components/publicNodes.js b/website/static/js/components/publicNodes.js
index 113622335de..12141e19b5b 100644
--- a/website/static/js/components/publicNodes.js
+++ b/website/static/js/components/publicNodes.js
@@ -202,7 +202,7 @@ var PublicNodes = {
'You have no public ' + ctrl.nodeType + '.',
m('p', {}, [
'Find out how to make your ' + ctrl.nodeType + ' ',
- m('a', {'href': 'https://openscience.zendesk.com/hc/en-us/articles/360018981414', 'target': '_blank'}, 'public'),
+ m('a', {'href': 'https://help.osf.io/article/285-control-your-privacy-settings', 'target': '_blank'}, 'public'),
'.'
])
])
diff --git a/website/static/js/logActionsList.json b/website/static/js/logActionsList.json
index a6665c1f626..56d3aa925d4 100644
--- a/website/static/js/logActionsList.json
+++ b/website/static/js/logActionsList.json
@@ -100,5 +100,9 @@
"has_prereg_links_updated": "${user} has updated their preregistration data link availability to ${value}",
"prereg_links_updated": "${user} has updated their preregistration data links",
"why_no_prereg_updated": "${user} has updated their preregistration data availability statement",
- "prereg_links_info_updated": "${user} has updated their preregistration links to ${value}"
+ "prereg_links_info_updated": "${user} has updated their preregistration links to ${value}",
+ "migrated_quickfiles": "${user} had their QuickFiles migrated into ${node}",
+ "resource_identifier_added": "${user} has added a Resource with DOI ${new_identifier} to Registration ${node}",
+ "resource_identifier_removed": "${user} has removed a Resource with DOI ${obsolete_identifier} to Registration ${node}",
+ "resource_identifier_updated": "${user} has updated a Resource DOI on Registration ${node} from ${obsolete_identifier} to ${new_identifier}"
}
diff --git a/website/static/js/myProjects.js b/website/static/js/myProjects.js
index 95a4f1b59e3..94cf1424de6 100644
--- a/website/static/js/myProjects.js
+++ b/website/static/js/myProjects.js
@@ -812,10 +812,10 @@ var MyProjects = {
} else {
template = m('.db-non-load-template.m-md.p-md.osf-box',
'You have not made any registrations yet. Go to ',
- m('a', {href: 'https://openscience.zendesk.com/hc/en-us/categories/360001550953'}, 'Guides'), ' to learn how registrations work.' );
+ m('a', {href: 'https://help.osf.io/article/330-welcome-to-registrations'}, 'Guides'), ' to learn how registrations work.' );
}
} else if (lastcrumb.data.nodeType === 'preprints'){
- template = m('.db-non-load-template.m-md.p-md.osf-box', [m('span', 'You have not made any preprints yet. Learn more about preprints in the '), m('a[href="https://openscience.zendesk.com/hc/en-us/categories/360001530554"]', 'OSF Guides'), m('span', ' or '), m('a[href="/preprints/"]', 'make one now.')]);
+ template = m('.db-non-load-template.m-md.p-md.osf-box', [m('span', 'You have not made any preprints yet. Learn more about preprints in the '), m('a[href="https://help.osf.io/article/376-preprints-home-page"]', 'OSF Guides'), m('span', ' or '), m('a[href="/preprints/"]', 'make one now.')]);
} else if (lodashGet(lastcrumb, 'data.node.attributes.bookmarks')) {
template = m('.db-non-load-template.m-md.p-md.osf-box', 'You have no bookmarks. You can add projects or registrations by dragging them into your bookmarks or by clicking the Add to Bookmark button on the project or registration.');
} else {
diff --git a/website/templates/base.mako b/website/templates/base.mako
index 51508da84fd..d4963d3a3ef 100644
--- a/website/templates/base.mako
+++ b/website/templates/base.mako
@@ -172,7 +172,7 @@
diff --git a/website/templates/emails/conference_submitted.html.mako b/website/templates/emails/conference_submitted.html.mako
index cd6707586a6..60f190cf353 100644
--- a/website/templates/emails/conference_submitted.html.mako
+++ b/website/templates/emails/conference_submitted.html.mako
@@ -22,9 +22,9 @@
* Collaborators/contributors to the submission
* Charts, graphs, and data that didn't make it onto the submission
* Links to related publications or reference lists
- * Connecting other accounts, like Dropbox, Google Drive, GitHub, figshare and Mendeley via add-on integration. Learn more and read the full list of available add-ons here.
+ * Connecting other accounts, like Dropbox, Google Drive, GitHub, figshare and Mendeley via add-on integration. Learn more and read the full list of available add-ons here.
- To learn more about OSF, read the Guides.
+ To learn more about OSF, read the Guides.
Sincerely,
diff --git a/website/templates/emails/new_public_project.html.mako b/website/templates/emails/new_public_project.html.mako
index c101e5198a9..ea4ac45b88a 100644
--- a/website/templates/emails/new_public_project.html.mako
+++ b/website/templates/emails/new_public_project.html.mako
@@ -17,7 +17,7 @@
!function(d,s,id){var js,fjs=d.getElementsByTagName(s)[0],p=/^http:/.test(d.location)?'http':'https';if(!d.getElementById(id)){js=d.createElement(s);js.id=id;js.src=p+'://platform.twitter.com/widgets.js';fjs.parentNode.insertBefore(js,fjs);}}(document, 'script', 'twitter-wjs');
- If you would like to learn more about how to take advantage of any of these features, visit our Guides..
+ If you would like to learn more about how to take advantage of any of these features, visit our Guides..
Sincerely,
diff --git a/website/templates/emails/no_login.html.mako b/website/templates/emails/no_login.html.mako
index e0d2fe8a5c7..d691030bed6 100644
--- a/website/templates/emails/no_login.html.mako
+++ b/website/templates/emails/no_login.html.mako
@@ -14,7 +14,7 @@
You can connect to third-party services like Dropbox or Google Drive
To get started now, visit your dashboard and click on “Create a project.”
- Need help getting started with a project? Check out the OSF Help Guides or one of our recent OSF 101 webinars.
+ Need help getting started with a project? Check out the OSF Help Guides or one of our recent OSF 101 webinars.
Sincerely,
diff --git a/website/templates/emails/reviews_submission_confirmation.html.mako b/website/templates/emails/reviews_submission_confirmation.html.mako
index 64a85f209cd..bd541714347 100644
--- a/website/templates/emails/reviews_submission_confirmation.html.mako
+++ b/website/templates/emails/reviews_submission_confirmation.html.mako
@@ -57,8 +57,8 @@
Now that you've shared your ${document_type}, take advantage of more OSF features:
- Upload supplemental, materials, data, and code to an OSF project associated with your ${document_type}.
- Learn how
- - Preregister your next study. Read more
+ Learn how
+ - Preregister your next study. Read more
- Or share on social media: Tell your friends through:
diff --git a/website/templates/emails/reviews_submission_status.html.mako b/website/templates/emails/reviews_submission_status.html.mako
index d05abfd9ae3..b0af1a88b45 100644
--- a/website/templates/emails/reviews_submission_status.html.mako
+++ b/website/templates/emails/reviews_submission_status.html.mako
@@ -55,12 +55,12 @@
Now that you've shared your ${document_type}, take advantage of more OSF features:
% if has_psyarxiv_chronos_text:
- - Submit your preprint to an APA-published journal. Learn more
+ - Submit your preprint to an APA-published journal. Learn more
% endif
- Upload supplemental, materials, data, and code to the OSF project associated with your ${document_type}.
- Learn how
- - Preregister your next study. Read more
+ Learn how
+ - Preregister your next study. Read more
- Or share on social media: Tell your friends through:
diff --git a/website/templates/emails/welcome.html.mako b/website/templates/emails/welcome.html.mako
index 63d05e60424..5fafa3e29a5 100644
--- a/website/templates/emails/welcome.html.mako
+++ b/website/templates/emails/welcome.html.mako
@@ -21,34 +21,34 @@ Files can be stored in a location you specify from the available geographic regi
% endif
Store your files
-Archive your materials, data, manuscripts, or anything else associated with your work during the research process or after it is complete. Learn how.
+Archive your materials, data, manuscripts, or anything else associated with your work during the research process or after it is complete. Learn how.
Share your work
-Keep your research materials and data private, make it accessible to specific others with view-only links, or make it publicly accessible. You have full control of what parts of your research are public and what remains private. Explore privacy settings.
+Keep your research materials and data private, make it accessible to specific others with view-only links, or make it publicly accessible. You have full control of what parts of your research are public and what remains private. Explore privacy settings.
Register your research
-Create a permanent, time-stamped version of your projects and files. Do this to preregister your design and analysis plan to conduct a confirmatory study, or archive your materials, data, and analysis scripts when publishing a report. Read about registrations.
+Create a permanent, time-stamped version of your projects and files. Do this to preregister your design and analysis plan to conduct a confirmatory study, or archive your materials, data, and analysis scripts when publishing a report. Read about registrations.
Make your work citable
-Every project and file on the OSF has a permanent unique identifier, and every registration can be assigned a DOI. Citations for public projects are generated automatically so that visitors can give you credit for your research. Learn more.
+Every project and file on the OSF has a permanent unique identifier, and every registration can be assigned a DOI. Citations for public projects are generated automatically so that visitors can give you credit for your research. Learn more.
Measure your impact
-You can monitor traffic to your public projects and downloads of your public files. Discover analytics.
+You can monitor traffic to your public projects and downloads of your public files. Discover analytics.
Connect services that you use
-OSF integrates with GitHub, Dropbox, Google Drive, Box, Dataverse, figshare, Amazon S3, ownCloud, Bitbucket, GitLab, OneDrive, Mendeley, and Zotero. Link the services that you use to your OSF projects so that all parts of your research are in one place Learn about add-ons.
+OSF integrates with GitHub, Dropbox, Google Drive, Box, Dataverse, figshare, Amazon S3, ownCloud, Bitbucket, GitLab, OneDrive, Mendeley, and Zotero. Link the services that you use to your OSF projects so that all parts of your research are in one place Learn about add-ons.
Collaborate
-Add your collaborators to have a shared environment for maintaining your research materials and data and never lose files again. Start collaborating.
+Add your collaborators to have a shared environment for maintaining your research materials and data and never lose files again. Start collaborating.
-Learn more about OSF by reading the Guides.
+Learn more about OSF by reading the Guides.
Sincerely,
diff --git a/website/templates/emails/welcome_osf4i.html.mako b/website/templates/emails/welcome_osf4i.html.mako
index d4285a3ea5c..36b0bf4b3a1 100644
--- a/website/templates/emails/welcome_osf4i.html.mako
+++ b/website/templates/emails/welcome_osf4i.html.mako
@@ -21,7 +21,7 @@ Files can be stored in a location you specify from the available geographic regi
% endif
Store your files
-Archive your materials, data, manuscripts, or anything else associated with your work during the research process or after it is complete. Learn how.
+Archive your materials, data, manuscripts, or anything else associated with your work during the research process or after it is complete. Learn how.
Affiliate your projects with your institution
@@ -29,30 +29,30 @@ Associate your projects with your institution. They will be added to your instit
Share your work
-Keep your research materials and data private, make it accessible to specific others with view-only links, or make it publicly accessible. You have full control of what parts of your research are public and what remains private. Explore privacy settings.
+Keep your research materials and data private, make it accessible to specific others with view-only links, or make it publicly accessible. You have full control of what parts of your research are public and what remains private. Explore privacy settings.
Register your research
-Create a permanent, time-stamped version of your projects and files. Do this to preregister your design and analysis plan to conduct a confirmatory study, or archive your materials, data, and analysis scripts when publishing a report. Read about registrations.
+Create a permanent, time-stamped version of your projects and files. Do this to preregister your design and analysis plan to conduct a confirmatory study, or archive your materials, data, and analysis scripts when publishing a report. Read about registrations.
Make your work citable
-Every project and file on the OSF has a permanent unique identifier, and every registration can be assigned a DOI. Citations for public projects are generated automatically so that visitors can give you credit for your research. Learn more.
+Every project and file on the OSF has a permanent unique identifier, and every registration can be assigned a DOI. Citations for public projects are generated automatically so that visitors can give you credit for your research. Learn more.
Measure your impact
-You can monitor traffic to your public projects and downloads of your public files. Discover analytics.
+You can monitor traffic to your public projects and downloads of your public files. Discover analytics.
Connect services that you use
-OSF integrates with GitHub, Dropbox, Google Drive, Box, Dataverse, figshare, Amazon S3, ownCloud, Bitbucket, GitLab, OneDrive, Mendeley, and Zotero. Link the services that you use to your OSF projects so that all parts of your research are in one place Learn about add-ons.
+OSF integrates with GitHub, Dropbox, Google Drive, Box, Dataverse, figshare, Amazon S3, ownCloud, Bitbucket, GitLab, OneDrive, Mendeley, and Zotero. Link the services that you use to your OSF projects so that all parts of your research are in one place Learn about add-ons.
Collaborate
-Add your collaborators to have a shared environment for maintaining your research materials and data and never lose files again. Start collaborating.
+Add your collaborators to have a shared environment for maintaining your research materials and data and never lose files again. Start collaborating.
-Learn more about OSF by reading the Guides.
+Learn more about OSF by reading the Guides.
Sincerely,
diff --git a/website/templates/emails/welcome_osf4m.html.mako b/website/templates/emails/welcome_osf4m.html.mako
index 169d9977c00..b5920b1aafb 100644
--- a/website/templates/emails/welcome_osf4m.html.mako
+++ b/website/templates/emails/welcome_osf4m.html.mako
@@ -17,7 +17,7 @@
- You have one place to reference when looking for your research materials
- You can monitor interest in your data and materials by tracking downloads, just like you can for your ${conference} presentation.
- To learn more about how the OSF can help you manage your research, read our Guides.
+ To learn more about how the OSF can help you manage your research, read our Guides.
The best part? It’s all free! OSF is supported by the non-profit technology company, the Center for Open Science.
Best wishes,
diff --git a/website/templates/include/profile/names.mako b/website/templates/include/profile/names.mako
index 6cb78021462..988926a7bf1 100644
--- a/website/templates/include/profile/names.mako
+++ b/website/templates/include/profile/names.mako
@@ -4,13 +4,8 @@
diff --git a/website/templates/public/pages/meeting.mako b/website/templates/public/pages/meeting.mako
index d43af20a623..b279123f6c6 100644
--- a/website/templates/public/pages/meeting.mako
+++ b/website/templates/public/pages/meeting.mako
@@ -3,7 +3,7 @@
<%def name="nav()">
<%namespace name="nav_helper" file="nav.mako" />
- ${nav_helper.nav(service_name='MEETINGS', service_url='/meetings/', service_support_url='https://openscience.zendesk.com/hc/en-us/categories/360001550933')}
+ ${nav_helper.nav(service_name='MEETINGS', service_url='/meetings/', service_support_url='https://help.osf.io/article/397-osf-meetings')}
%def>
<%def name="content()">
diff --git a/website/templates/public/pages/meeting_landing.mako b/website/templates/public/pages/meeting_landing.mako
index dea6fe95726..c7d10ec7b86 100644
--- a/website/templates/public/pages/meeting_landing.mako
+++ b/website/templates/public/pages/meeting_landing.mako
@@ -4,7 +4,7 @@
<%def name="nav()">
<%namespace name="nav_helper" file="nav.mako" />
- ${nav_helper.nav(service_name='MEETINGS', service_url='/meetings/', service_support_url='https://openscience.zendesk.com/hc/en-us/categories/360001550933')}
+ ${nav_helper.nav(service_name='MEETINGS', service_url='/meetings/', service_support_url='https://help.osf.io/article/397-osf-meetings')}
%def>
<%def name="stylesheets()">
diff --git a/website/templates/util/render_nodes.mako b/website/templates/util/render_nodes.mako
index 341fe6d70b9..497e390ba92 100644
--- a/website/templates/util/render_nodes.mako
+++ b/website/templates/util/render_nodes.mako
@@ -36,7 +36,7 @@
You have no public ${pluralized_node_type}.
Find out how to make your ${pluralized_node_type}
- public.
+ public.
% elif profile is not UNDEFINED: ## On profile page and user has no public projects/components
diff --git a/website/views.py b/website/views.py
index a78c670a6f9..2fa4b047b89 100644
--- a/website/views.py
+++ b/website/views.py
@@ -31,6 +31,7 @@
from website.settings import EXTERNAL_EMBER_APPS, PROXY_EMBER_APPS, EXTERNAL_EMBER_SERVER_TIMEOUT, DOMAIN
from website.ember_osf_web.decorators import ember_flag_is_active
from website.ember_osf_web.views import use_ember_app
+from website.project.decorators import check_contributor_auth
from website.project.model import has_anonymous_link
from osf.utils import permissions
@@ -312,7 +313,20 @@ def resolve_guid(guid, suffix=None):
if isinstance(resource, DraftNode):
raise HTTPError(http_status.HTTP_404_NOT_FOUND)
+ if isinstance(resource, AbstractNode):
+ response = check_contributor_auth(
+ resource,
+ auth=Auth.from_kwargs(request.args.to_dict(), {}),
+ include_public=True,
+ include_view_only_anon=True,
+ include_groups=True
+ )
+ if response:
+ return response
+
# Stream to ember app if resource has emberized view
+ addon_paths = [f'files/{addon.short_name}' for addon in settings.ADDONS_AVAILABLE_DICT.values() if 'storage' in addon.categories] + ['files']
+
if isinstance(resource, Preprint):
if resource.provider.domain_redirect_enabled:
return redirect(resource.absolute_url, http_status.HTTP_301_MOVED_PERMANENTLY)
@@ -324,9 +338,14 @@ def resolve_guid(guid, suffix=None):
elif isinstance(resource, Registration) and suffix and suffix.rstrip('/').lower() in ('files', 'files/osfstorage') and waffle.flag_is_active(request, features.EMBER_REGISTRATION_FILES):
return stream_emberapp(EXTERNAL_EMBER_APPS['ember_osf_web']['server'], ember_osf_web_dir)
+ elif isinstance(resource, Node) and suffix and any(path.startswith(suffix.rstrip('/').lower()) for path in addon_paths) and waffle.flag_is_active(request, features.EMBER_PROJECT_FILES):
+ return stream_emberapp(EXTERNAL_EMBER_APPS['ember_osf_web']['server'], ember_osf_web_dir)
+
elif isinstance(resource, BaseFileNode) and resource.is_file and not isinstance(resource.target, Preprint):
if isinstance(resource.target, Registration) and waffle.flag_is_active(request, features.EMBER_FILE_REGISTRATION_DETAIL):
return stream_emberapp(EXTERNAL_EMBER_APPS['ember_osf_web']['server'], ember_osf_web_dir)
+ if isinstance(resource.target, Node) and waffle.flag_is_active(request, features.EMBER_FILE_PROJECT_DETAIL):
+ return stream_emberapp(EXTERNAL_EMBER_APPS['ember_osf_web']['server'], ember_osf_web_dir)
# Redirect to legacy endpoint for Nodes, Wikis etc.
url = _build_guid_url(unquote(resource.deep_url), suffix)
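The addon_paths check above routes any Node files URL, whether the top-level files page or a per-addon path, to the ember app when the EMBER_PROJECT_FILES flag is active. An isolated sketch of the suffix matching (the addon short names are examples; the real list comes from ADDONS_AVAILABLE_DICT entries whose categories include 'storage'):

addon_paths = ['files/osfstorage', 'files/github', 'files/dropbox', 'files']  # example short names

def routes_to_ember_files(suffix):
    # Mirrors the suffix check used in resolve_guid for Node resources.
    return bool(suffix) and any(path.startswith(suffix.rstrip('/').lower()) for path in addon_paths)

assert routes_to_ember_files('files')
assert routes_to_ember_files('files/')
assert routes_to_ember_files('files/osfstorage')
assert not routes_to_ember_files('wiki')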
@@ -349,9 +368,9 @@ def redirect_howosfworks(**kwargs):
return redirect('/getting-started/')
-# redirect osf.io/getting-started to https://openscience.zendesk.com/hc/en-us
+# redirect osf.io/getting-started to https://help.osf.io/article/342-getting-started-on-the-osf
def redirect_getting_started(**kwargs):
- return redirect('https://openscience.zendesk.com/hc/en-us')
+ return redirect('https://help.osf.io/article/342-getting-started-on-the-osf')
# Redirect to home page