Merge branch 'release/0.124.0'
sloria committed Nov 14, 2017
2 parents 29f5194 + 56e601a commit cb7c5dc
Showing 148 changed files with 2,521 additions and 352 deletions.
2 changes: 1 addition & 1 deletion .docker-compose.sharejs.env
@@ -1,7 +1,7 @@
SHAREJS_SERVER_HOST=0.0.0.0
SHAREJS_SERVER_PORT=7007
SHAREJS_DB_NAME=sharejs
-SHAREJS_DB_URL=mongodb://192.168.168.167:27017/sharejs
+SHAREJS_DB_URL=mongodb://192.168.168.167:27017/sharejs?ssl=true

#SHAREJS_CORS_ALLOW_ORIGIN=
#SHAREJS_SENTRY_DSN=
1 change: 1 addition & 0 deletions .dockerignore
@@ -7,3 +7,4 @@ node_modules
*.pyc
**/*.pyc
docker*
+ssl/
1 change: 1 addition & 0 deletions .gitignore
@@ -194,3 +194,4 @@ scripts/image_maniplation/test_rounded_corners.html
docker-compose.override.yml
.unison*
.docker-sync/
+ssl/
5 changes: 5 additions & 0 deletions CHANGELOG
@@ -2,6 +2,11 @@
Changelog
*********

+0.124.0 (2017-11-13)
+====================
+
+- Critical bug fixes
+
0.123.0 (2017-10-26)
====================

10 changes: 5 additions & 5 deletions Dockerfile
@@ -6,14 +6,14 @@ ENV GOSU_VERSION=1.10 \
    YARN_VERSION=1.1.0

# Libraries such as matplotlib require a HOME directory for cache and configuration
RUN usermod -d /home www-data \
    && chown www-data:www-data /home \
    # https://github.com/nodejs/docker-node/blob/9c25cbe93f9108fd1e506d14228afe4a3d04108f/8.2/Dockerfile
    # gpg keys listed at https://github.com/nodejs/node#release-team
    && set -ex \
RUN set -ex \
    && mkdir -p /var/www \
    && chown www-data:www-data /var/www \
    # GOSU
    && gpg --keyserver pool.sks-keyservers.net --recv-keys B42F6819007F00F88E364FD4036A9C25BF357DD4 \
    && for key in \
    # https://github.com/nodejs/docker-node/blob/9c25cbe93f9108fd1e506d14228afe4a3d04108f/8.2/Dockerfile
    # gpg keys listed at https://github.com/nodejs/node#release-team
    # Node
    9554F04D7259F04124DE6B476D5A82AC7E37093B \
    94AE36675C464D64BAFA68DD7434390BDBE9B9C5 \
2 changes: 1 addition & 1 deletion README-docker-compose.md
@@ -120,7 +120,7 @@ Ubuntu: Skip install of docker-sync. instead...
_NOTE: When the various requirements installations are complete these containers will exit. You should only need to run these containers after pulling code that changes python requirements or if you update the python requirements._

2. Start Core Component Services (Detached)
-  - `$ docker-compose up -d elasticsearch postgres tokumx rabbitmq`
+  - `$ docker-compose up -d elasticsearch postgres mongo rabbitmq`

3. Remove your existing node_modules and start the assets watcher (Detached)
- `$ rm -Rf ./node_modules`
4 changes: 2 additions & 2 deletions addons/wiki/settings/defaults.py
@@ -1,5 +1,5 @@
import datetime

+import os
import pytz

from website import settings
@@ -9,7 +9,7 @@
SHAREJS_URL = '{}:{}'.format(SHAREJS_HOST, SHAREJS_PORT)

SHAREJS_DB_NAME = 'sharejs'
-SHAREJS_DB_URL = 'mongodb://{}:{}/{}'.format(settings.DB_HOST, settings.DB_PORT, SHAREJS_DB_NAME)
+SHAREJS_DB_URL = os.environ.get('SHAREJS_DB_URL', 'mongodb://{}:{}/{}'.format(settings.DB_HOST, settings.DB_PORT, SHAREJS_DB_NAME))

# TODO: Change to release date for wiki change
WIKI_CHANGE_DATE = datetime.datetime.utcfromtimestamp(1423760098).replace(tzinfo=pytz.utc)
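Editor's note: the settings change above swaps a hard-coded connection string for an environment-first lookup, which is what lets the env file at the top of this diff inject the ?ssl=true URL. A standalone sketch of the pattern (host, port, and database name here are placeholders, not the project's real settings):

    import os

    # The environment wins when set; the computed default preserves the old behaviour.
    DB_HOST, DB_PORT, SHAREJS_DB_NAME = 'localhost', 27017, 'sharejs'
    SHAREJS_DB_URL = os.environ.get(
        'SHAREJS_DB_URL',
        'mongodb://{}:{}/{}'.format(DB_HOST, DB_PORT, SHAREJS_DB_NAME),
    )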
3 changes: 2 additions & 1 deletion addons/wiki/utils.py
@@ -3,6 +3,7 @@
import urllib
import uuid

+import ssl
from pymongo import MongoClient
import requests

@@ -101,7 +102,7 @@ def migrate_uuid(node, wname):

def share_db():
"""Generate db client for sharejs db"""
client = MongoClient(wiki_settings.SHAREJS_DB_URL)
client = MongoClient(wiki_settings.SHAREJS_DB_URL, ssl_cert_reqs=ssl.CERT_NONE)
return client[wiki_settings.SHAREJS_DB_NAME]


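Editor's note: with ?ssl=true in the URI, pymongo negotiates TLS; ssl_cert_reqs=ssl.CERT_NONE then skips certificate verification, so a self-signed certificate on the Mongo host is accepted. A minimal connection sketch under those assumptions (URL illustrative; ssl_cert_reqs is the pymongo 2.x/3.x spelling, replaced in pymongo 4 by tlsAllowInvalidCertificates):

    import ssl
    from pymongo import MongoClient

    # TLS comes from the ?ssl=true query option; CERT_NONE disables peer
    # verification, e.g. to accept a self-signed certificate.
    client = MongoClient(
        'mongodb://192.168.168.167:27017/sharejs?ssl=true',
        ssl_cert_reqs=ssl.CERT_NONE,
    )
    db = client['sharejs']

The trade-off: CERT_NONE gives up man-in-the-middle protection, so it is only reasonable on an otherwise trusted network path.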
14 changes: 7 additions & 7 deletions admin/preprint_providers/views.py
@@ -6,6 +6,7 @@
from django.core.urlresolvers import reverse_lazy
from django.http import HttpResponse, JsonResponse
from django.views.generic import ListView, DetailView, View, CreateView, DeleteView, TemplateView, UpdateView
+from django.core.management import call_command
from django.contrib.auth.mixins import PermissionRequiredMixin
from django.forms.models import model_to_dict
from django.shortcuts import redirect
@@ -18,8 +19,7 @@

# When preprint_providers exclusively use Subject relations for creation, set this to False
SHOW_TAXONOMIES_IN_PREPRINT_PROVIDER_CREATE = True
-#TODO: Add subjects back in when custom taxonomies are fully integrated
-FIELDS_TO_NOT_IMPORT_EXPORT = ['access_token', 'share_source', 'subjects_acceptable', 'subjects']
+FIELDS_TO_NOT_IMPORT_EXPORT = ['access_token', 'share_source']


class PreprintProviderList(PermissionRequiredMixin, ListView):
@@ -170,15 +170,15 @@ def get(self, request, *args, **kwargs):
        cleaned_fields = {key: value for key, value in cleaned_data['fields'].iteritems() if key not in FIELDS_TO_NOT_IMPORT_EXPORT}
        cleaned_fields['licenses_acceptable'] = [node_license.license_id for node_license in preprint_provider.licenses_acceptable.all()]
        cleaned_fields['default_license'] = preprint_provider.default_license.license_id if preprint_provider.default_license else ''
-        # cleaned_fields['subjects'] = self.serialize_subjects(preprint_provider)
+        cleaned_fields['subjects'] = self.serialize_subjects(preprint_provider)
        cleaned_data['fields'] = cleaned_fields
        filename = '{}_export.json'.format(preprint_provider.name)
        response = HttpResponse(json.dumps(cleaned_data), content_type='text/json')
        response['Content-Disposition'] = 'attachment; filename={}'.format(filename)
        return response

    def serialize_subjects(self, provider):
-        if provider._id != 'osf':
+        if provider._id != 'osf' and provider.subjects.count():
            result = {}
            result['include'] = []
            result['exclude'] = []
@@ -231,8 +231,9 @@ def post(self, request, *args, **kwargs):
        if form.is_valid():
            file_str = self.parse_file(request.FILES['file'])
            file_json = json.loads(file_str)
+            current_fields = [f.name for f in PreprintProvider._meta.get_fields()]
            # make sure not to import an exported access token for SHARE
-            cleaned_result = {key: value for key, value in file_json['fields'].iteritems() if key not in FIELDS_TO_NOT_IMPORT_EXPORT}
+            cleaned_result = {key: value for key, value in file_json['fields'].iteritems() if key not in FIELDS_TO_NOT_IMPORT_EXPORT and key in current_fields}
            preprint_provider = self.create_or_update_provider(cleaned_result)
            return redirect('preprint_providers:detail', preprint_provider_id=preprint_provider.id)

@@ -248,8 +249,7 @@ def get_page_provider(self):
        return PreprintProvider.objects.get(id=page_provider_id)

    def add_subjects(self, provider, subject_data):
-        from osf.management.commands.populate_custom_taxonomies import migrate
-        migrate(provider._id, subject_data)
+        call_command('populate_custom_taxonomies', '--provider', provider._id, '--data', json.dumps(subject_data))

    def create_or_update_provider(self, provider_data):
        provider = self.get_page_provider()
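Editor's note: the import view now intersects the uploaded JSON's keys with the model's current fields, so an export taken before a schema change still imports after fields are added or removed; the new test further down exercises exactly this. A simplified sketch of the filtering with plain dicts (the field names are stand-ins, not the real model):

    FIELDS_TO_NOT_IMPORT_EXPORT = ['access_token', 'share_source']
    current_fields = ['_id', 'name', 'default_license']  # stand-in for the model's fields

    payload = {'_id': 'new_id', 'name': 'Provider X',
               'access_token': 'secret', 'obsolete_field': 'dropped'}
    # Secrets never round-trip; unknown keys from a stale export are dropped.
    cleaned = {key: value for key, value in payload.items()
               if key not in FIELDS_TO_NOT_IMPORT_EXPORT and key in current_fields}
    # cleaned == {'_id': 'new_id', 'name': 'Provider X'}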
82 changes: 81 additions & 1 deletion admin/static/js/metrics/metrics.es6.js
@@ -64,7 +64,31 @@ var getOneDayTimeframe = function(daysBack, monthsBack) {
    };
};

+/**
+ * Configure a time frame for a day x days ago (end) and y days prior to x (start)
+ *
+ * @method getVariableDayTimeframe
+ * @param {Integer} endDaysBack - the number of days back to set as the end day
+ * @param {Integer} totalDays - the number of days back to reach the start day
+ * @return {Object} the keen-formatted timeframe
+ */
+var getVariableDayTimeframe = function(endDaysBack, totalDays) {
+    var start = null;
+    var end = null;
+    var date = new Date();
+
+    date.setUTCDate(date.getDate() - endDaysBack);
+    date.setUTCHours(0, 0, 0, 0, 0);
+
+    end = date.toISOString();
+
+    date.setDate(date.getDate() - totalDays);
+    start = date.toISOString();
+    return {
+        "start": start,
+        "end": end
+    };
+};

/**
 * Configure a Title for a chart dealing with the past month or day
@@ -322,6 +346,14 @@ var monthlyActiveUsersQuery = new keenAnalysis.Query("count_unique", {
    timezone: "UTC"
});

+// Previous 30 Days Active Users
+var thirtyDaysActiveUsersQuery = new keenAnalysis.Query("count_unique", {
+    eventCollection: "pageviews",
+    targetProperty: "user.id",
+    timeframe: "previous_30_days",
+    timezone: "UTC"
+});

var dailyActiveUsersQuery = new keenAnalysis.Query("count_unique", {
    event_collection: "pageviews",
    target_property: "user.id",
@@ -336,6 +368,48 @@ var totalProjectsQuery = new keenAnalysis.Query("sum", {
    timeframe: "previous_1_days",
});

+// 7 Days back Active Users
+var weekBackThirtyDaysActiveUsersQuery = new keenAnalysis.Query("count_unique", {
+    eventCollection: "pageviews",
+    targetProperty: "user.id",
+    timeframe: getVariableDayTimeframe(7, 30),
+});
+
+// 7 Days back Active Users
+var weekBackDailyActiveUsersQuery = new keenAnalysis.Query("count_unique", {
+    eventCollection: "pageviews",
+    targetProperty: "user.id",
+    timeframe: getVariableDayTimeframe(7, 1),
+});
+
+// 28 Days back Active Users
+var monthBackThirtyDaysActiveUsersQuery = new keenAnalysis.Query("count_unique", {
+    eventCollection: "pageviews",
+    targetProperty: "user.id",
+    timeframe: getVariableDayTimeframe(28, 30),
+});
+
+// 28 Days back Active Users
+var monthBackDailyActiveUsersQuery = new keenAnalysis.Query("count_unique", {
+    eventCollection: "pageviews",
+    targetProperty: "user.id",
+    timeframe: getVariableDayTimeframe(28, 1),
+});
+
+// 364 Days back Active Users
+var yearBackThirtyDaysActiveUsersQuery = new keenAnalysis.Query("count_unique", {
+    eventCollection: "pageviews",
+    targetProperty: "user.id",
+    timeframe: getVariableDayTimeframe(364, 30),
+});
+
+// 364 Days back Active Users
+var yearBackDailyActiveUsersQuery = new keenAnalysis.Query("count_unique", {
+    eventCollection: "pageviews",
+    targetProperty: "user.id",
+    timeframe: getVariableDayTimeframe(364, 1),
+});

// <+><+><+><+><+><+
// user data |
// ><+><+><+><+><+>+
@@ -808,7 +882,13 @@ var ActiveUserMetrics = function() {
var HealthyUserMetrics = function() {

    // stickiness ratio - DAU/MAU
-    renderCalculationBetweenTwoQueries(dailyActiveUsersQuery, monthlyActiveUsersQuery, "#stickiness-ratio", null, "percentage");
+    renderCalculationBetweenTwoQueries(dailyActiveUsersQuery, thirtyDaysActiveUsersQuery, "#stickiness-ratio-1-day-ago", null, "percentage");
+    // stickiness ratio - DAU/MAU for 1 week ago
+    renderCalculationBetweenTwoQueries(weekBackDailyActiveUsersQuery, weekBackThirtyDaysActiveUsersQuery , "#stickiness-ratio-1-week-ago", null, "percentage");
+    // stickiness ratio - DAU/MAU for 4 weeks ago
+    renderCalculationBetweenTwoQueries(monthBackDailyActiveUsersQuery, monthBackThirtyDaysActiveUsersQuery , "#stickiness-ratio-4-weeks-ago", null, "percentage");
+    // stickiness ratio - DAU/MAU for 52 weeks ago
+    renderCalculationBetweenTwoQueries(yearBackDailyActiveUsersQuery, yearBackThirtyDaysActiveUsersQuery , "#stickiness-ratio-52-weeks-ago", null, "percentage");
};


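Editor's note: getVariableDayTimeframe(endDaysBack, totalDays) pins the window's end to UTC midnight endDaysBack days ago and stretches it totalDays further back, so getVariableDayTimeframe(7, 30) is the 30-day window ending one week ago. The offsets used above (7, 28, 364) are all multiples of 7, which keeps every comparison on the same weekday. A rough Python equivalent of the window arithmetic (it ignores the original's mix of local and UTC Date calls):

    from datetime import datetime, timedelta

    def get_variable_day_timeframe(end_days_back, total_days):
        # End at UTC midnight `end_days_back` days ago; start `total_days` earlier.
        end = datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0)
        end -= timedelta(days=end_days_back)
        start = end - timedelta(days=total_days)
        # Keen expects ISO-8601 strings; 'Z' marks these naive datetimes as UTC.
        return {'start': start.isoformat() + 'Z', 'end': end.isoformat() + 'Z'}

    print(get_variable_day_timeframe(7, 30))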
44 changes: 42 additions & 2 deletions admin/templates/metrics/osf_metrics.html
@@ -577,10 +577,50 @@ <h4>Project to User Ratios</h4>
            Stickiness Ratio
        </div>
        <div class="chart-stage">
-            <div id="stickiness-ratio"></div>
+            <div id="stickiness-ratio-1-day-ago"></div>
        </div>
        <div class="chart-notes">
-            Of those that are active in the last month, how many are active daily? Daily Active Users / Monthly Active Users.
+            Of those that are active in the last 30 days, how many are active daily?
+            Formula: Daily Active Users (yesterday) / Daily Active Users previous 30 days
        </div>
    </div>
</div>
+<div class="col-sm-4">
+    <div class="chart-wrapper">
+        <div class="chart-title">
+            Stickiness Ratio 1 week ago
+        </div>
+        <div class="chart-stage">
+            <div id="stickiness-ratio-1-week-ago"></div>
+        </div>
+        <div class="chart-notes">
+            Stickiness Ratio for the same day of the week 1 week ago.
+        </div>
+    </div>
+</div>
+<div class="col-sm-4">
+    <div class="chart-wrapper">
+        <div class="chart-title">
+            Stickiness Ratio 4 weeks ago
+        </div>
+        <div class="chart-stage">
+            <div id="stickiness-ratio-4-weeks-ago"></div>
+        </div>
+        <div class="chart-notes">
+            Stickiness Ratio for the same day of the week 4 weeks ago.
+        </div>
+    </div>
+</div>
+<div class="col-sm-4">
+    <div class="chart-wrapper">
+        <div class="chart-title">
+            Stickiness Ratio 52 weeks ago
+        </div>
+        <div class="chart-stage">
+            <div id="stickiness-ratio-52-weeks-ago"></div>
+        </div>
+        <div class="chart-notes">
+            Stickiness Ratio for the same day of the week 52 weeks ago.
+        </div>
+    </div>
+</div>
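Editor's note: the stickiness ratio itself is a plain quotient, yesterday's unique users divided by unique users over the trailing 30 days, and the three added panels recompute it at fixed offsets so each reading is compared against the same weekday. A worked example with made-up counts:

    daily_active = 1200    # unique users yesterday (illustrative)
    monthly_active = 8000  # unique users over the previous 30 days (illustrative)

    stickiness = daily_active / float(monthly_active)
    print('{:.1%}'.format(stickiness))  # -> 15.0%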
29 changes: 25 additions & 4 deletions admin_tests/preprint_providers/test_views.py
@@ -250,11 +250,32 @@ def test_export_to_import_new_provider(self):

        nt.assert_equal(res.status_code, 302)
        nt.assert_equal(new_provider._id, 'new_id')
-        # nt.assert_equal(new_provider.subjects.all().count(), 1)
+        nt.assert_equal(new_provider.subjects.all().count(), 1)
        nt.assert_equal(new_provider.licenses_acceptable.all().count(), 1)
-        # nt.assert_equal(new_provider.subjects.all()[0].text, self.subject.text)
+        nt.assert_equal(new_provider.subjects.all()[0].text, self.subject.text)
        nt.assert_equal(new_provider.licenses_acceptable.all()[0].license_id, 'NONE')

+    def test_export_to_import_new_provider_with_models_out_of_sync(self):
+        update_taxonomies('test_bepress_taxonomy.json')
+
+        res = self.view.get(self.request)
+        content_dict = json.loads(res.content)
+
+        content_dict['fields']['_id'] = 'new_id'
+        content_dict['fields']['new_field'] = 'this is a new field, not in the model'
+        del content_dict['fields']['description']  # this is a old field, removed from the model JSON
+
+        data = StringIO(unicode(json.dumps(content_dict), 'utf-8'))
+        self.import_request.FILES['file'] = InMemoryUploadedFile(data, None, 'data', 'application/json', 500, None, {})
+
+        res = self.import_view.post(self.import_request)
+
+        provider_id = ''.join([i for i in res.url if i.isdigit()])
+        new_provider = PreprintProvider.objects.get(id=provider_id)
+
+        nt.assert_equal(res.status_code, 302)
+        nt.assert_equal(new_provider._id, 'new_id')

    def test_update_provider_existing_subjects(self):
        # If there are existing subjects for a provider, imported subjects are ignored
        self.import_view.kwargs = {'preprint_provider_id': self.preprint_provider.id}
@@ -281,9 +302,9 @@

        nt.assert_equal(res.status_code, 302)
        nt.assert_equal(new_provider_id, self.preprint_provider.id)
-        # nt.assert_equal(self.preprint_provider.subjects.all().count(), 1)
+        nt.assert_equal(self.preprint_provider.subjects.all().count(), 1)
        nt.assert_equal(self.preprint_provider.licenses_acceptable.all().count(), 1)
-        # nt.assert_equal(self.preprint_provider.subjects.all()[0].text, self.subject.text)
+        nt.assert_equal(self.preprint_provider.subjects.all()[0].text, self.subject.text)
        nt.assert_equal(self.preprint_provider.licenses_acceptable.all()[0].license_id, 'CCBY')


6 changes: 6 additions & 0 deletions api/base/generic_bulk_views.py
@@ -76,6 +76,7 @@ def get_requested_resources(self, request, request_data):
        Retrieves resources in request body
        """
        model_cls = request.parser_context['view'].model_class
+
        requested_ids = [data['id'] for data in request_data]
        column_name = 'guids___id' if issubclass(model_cls, GuidMixin) else '_id'
        resource_object_list = model_cls.find(Q(column_name, 'in', requested_ids))
@@ -87,6 +88,11 @@
        if len(resource_object_list) != len(request_data):
            raise ValidationError({'non_field_errors': 'Could not find all objects to delete.'})

+        if column_name == 'guids___id':
+            resource_object_list = [resource_object_list.get(guids___id=id) for id in requested_ids]
+        else:
+            resource_object_list = [resource_object_list.get(_id=id) for id in requested_ids]
+
        return resource_object_list

    def allow_bulk_destroy_resources(self, user, resource_list):
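Editor's note: the appended block re-sorts the fetched objects into the order the ids arrived in, since an 'in' query guarantees nothing about result order while bulk responses are expected to line up with the request payload. The committed code issues one .get() per id; the intent, sketched with plain dicts:

    requested_ids = ['c', 'a', 'b']
    fetched = [{'_id': 'a'}, {'_id': 'b'}, {'_id': 'c'}]  # arbitrary DB order

    # Index once, then emit in request order.
    by_id = {obj['_id']: obj for obj in fetched}
    ordered = [by_id[i] for i in requested_ids]
    # [o['_id'] for o in ordered] == ['c', 'a', 'b'], matching the request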