run.py — forked from sanger-archive/aker-material-service
(117 lines, 85 loc, 3.27 KB)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
import os
import logging
import uuid
import json
import copy
import pdb
from uuid_encoder import UUIDEncoder
from uuid_validator import UUIDValidator
from eve import Eve
from flask import request, jsonify, abort, Response, current_app
from flask_bootstrap import Bootstrap
from eve_swagger import swagger
from bson import json_util
# Pick the Eve settings module for the current environment: db/<env>.py,
# where <env> comes from EVE_ENV (defaulting to 'development').
environment = os.getenv('EVE_ENV', 'development')
SETTINGS_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'db', environment + '.py')
def create_app(settings):
    """Build and configure the Eve application.

    Args:
        settings: path to an Eve settings file (see SETTINGS_PATH).

    Returns:
        The configured Eve app, with UUID _ids assigned on insert and
        three extra /materials endpoints (validate, schema, bulk_get).
    """
    app = Eve(settings=settings, json_encoder=UUIDEncoder, validator=UUIDValidator)
    Bootstrap(app)
    app.register_blueprint(swagger)

    def set_uuid(resource_name, items):
        # Use a UUID string rather than a Mongo ObjectId for every new item.
        for item in items:
            item['_id'] = str(uuid.uuid4())

    app.on_insert += set_uuid

    # Very rudimentary validation method... just for development!
    @app.route('/materials/validate', methods=['POST'])
    def validate(**lookup):
        if 'materials' not in request.json:
            abort(422)
        validation_set = set(request.json['materials'])
        result_set = set()
        # Project only _id: we just need to know which ids exist.
        for material in app.data.driver.db.materials.find(
                {'_id': {'$in': request.json['materials']}}, {'_id': 1}):
            result_set.add(material['_id'])
        difference = validation_set - result_set
        diff_len = len(difference)
        if diff_len == 0:
            return "ok"
        else:
            return "not ok - " + str(diff_len) + " materials not found"

    def cerberus_to_json_schema(schema_obj):
        """Return a copy of a Cerberus schema massaged towards JSON Schema:
        datetime fields become string/date, and internal fields are dropped.
        """
        schema_obj_copy = copy.deepcopy(schema_obj)
        filter_list = ['meta', '_id', 'parent', 'ancestors']
        for key in schema_obj_copy:
            # JSON Schema has no 'datetime' type; .get() guards entries
            # that carry no 'type' at all (the original raised KeyError).
            if schema_obj_copy[key].get('type') == 'datetime':
                schema_obj_copy[key]['type'] = 'string'
                schema_obj_copy[key]['format'] = 'date'
        for key in filter_list:
            # Fix: the original `if schema_obj_copy[key]` raised KeyError for
            # absent keys and skipped present-but-falsy entries; pop() removes
            # the key whenever it exists and is a no-op otherwise.
            schema_obj_copy.pop(key, None)
        return schema_obj_copy

    @app.route('/materials/schema', methods=['GET'])
    def bulk_schema(**lookup):
        schema_obj = cerberus_to_json_schema(current_app.config['DOMAIN']['materials']['schema'])
        # json_util handles BSON types (e.g. datetimes) the default encoder can't.
        schema_str = json.dumps(schema_obj, default=json_util.default)
        return Response(response=schema_str, status=200, mimetype="application/json")

    @app.route('/materials/bulk_get', methods=['POST'])
    def bulk_get(**lookup):
        if 'materials' not in request.json:
            abort(422)
        materials = []
        for material in app.data.driver.db.materials.find(
                {'_id': {'$in': request.json['materials']}}):
            materials.append(material)
        materials = json.dumps(materials, default=json_util.default)
        return Response(response=materials,
                        status=200,
                        mimetype="application/json")

    return app
# enable logging to 'app.log' file
handler = logging.FileHandler('app.log')
# set a custom log format, and add request
# metadata to each log line
# NOTE(review): clientip/url/method are not standard LogRecord attributes;
# every log call must supply them via `extra={...}` or formatting will fail.
# Confirm all app.logger call sites do this.
handler.setFormatter(logging.Formatter(
    '%(asctime)s %(levelname)s: %(message)s '
    '[in %(filename)s:%(lineno)d] -- ip: %(clientip)s, '
    'url: %(url)s, method:%(method)s'))
# build the application from the environment-specific settings file
app = create_app(SETTINGS_PATH)
# the default log level is set to WARNING, so
# we have to explicitly set the logging level
# to INFO to get our custom message logged.
app.logger.setLevel(logging.INFO)
# append the handler to the default application logger
app.logger.addHandler(handler)
# Run the Flask development server when executed directly; production
# deployments should serve `app` through a proper WSGI server instead.
if __name__ == '__main__':
    app.run()