diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md
new file mode 100644
index 0000000..1ebd4db
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE.md
@@ -0,0 +1,14 @@
+## Motivation
+
+write the motivation
+
+## Goal
+- write the goal
+
+write the description
+
+## Implementation
+- feature
+- feature
+- resource
+- testing
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000..b536ce9
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,21 @@
+## What is this PR for?
+
+write a reason
+
+## This PR includes
+
+- item
+- item
+- item
+
+## What type of PR is it?
+
+Feature/Bugfix/....
+
+## What is the issue?
+
+N/A
+
+## How should this be tested?
+
+write a method and a sample of command
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..26ac6fb
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,118 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# pyenv
+.python-version
+
+# celery beat schedule file
+celerybeat-schedule
+
+# SageMath parsed files
+*.sage.py
+
+# dotenv
+.env
+
+# virtualenv
+.venv
+venv/
+ENV/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+
+# PyCharm
+.idea/
+
+# ML Model
+model/
+
+# grpc file
+drucker_pb2.py
+drucker_pb2_grpc.py
+
+# sqlite
+db.sqlite3
+db.test.sqlite3
+
+# Mac OS temporary file
+.DS_Store
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1160df1
--- /dev/null
+++ b/README.md
@@ -0,0 +1,134 @@
+# Drucker
+Drucker is a framework for serving machine learning modules. Drucker makes it easy to serve, manage, and integrate your ML models into your existing services. Moreover, Drucker can be used on Kubernetes.
+
+## Parent Project
+https://github.com/drucker/drucker-parent
+
+## Components
+- [Drucker](https://github.com/drucker/drucker) (here): Serving framework for a machine learning module.
+- [Drucker-dashboard](https://github.com/drucker/drucker-dashboard): Web service for managing the machine learning models deployed to Drucker services.
+- [Drucker-client](https://github.com/drucker/drucker-client): SDK for accessing a drucker service.
+- [Drucker-example](https://github.com/drucker/drucker-example): Example of how to use drucker.
+
+## Example
+Example is available [here](https://github.com/drucker/drucker-example).
+
+## Procedures
+### Git Submodule Add
+```
+$ git submodule add https://github.com/drucker/drucker.git drucker
+$ git submodule add https://github.com/drucker/drucker-grpc-proto.git drucker-grpc-proto
+$ cp ./drucker/template/settings.yml .
+$ cp ./drucker/template/predict.py .
+$ cp ./drucker/template/server.py .
+$ cp ./drucker/template/start.sh .
+```
+
+### When update comes
+```
+$ git submodule update --recursive
+```
+
+Check the files above and, if they have been updated, merge the changes into your files.
+
+### Edit settings.yml
+```
+$ vi settings.yml
+```
+
+### Edit predict.py
+```
+$ vi predict.py
+```
+
+Write the following methods.
+
+#### load_model
+Loading the ML model into your ML module. This method is called on startup or when the model is switched.
+
+Argument `model_path` is the path of a ML model. You can load the model like this.
+
+```
+self.predictor = joblib.load(model_path)
+```
+
+We recommend the architecture of "1 Drucker loads 1 file" but sometimes your module needs several files to load. In that case you need to create a compressed file including the files it requires. `model_path` will be your compressed file and you decompress it by yourself.
+
+#### predict
+Predicting or inferencing from the input. The definitions of input or output are described below. `bytes` can be a byte data of a file.
+
+##### Input format
+*V* is the length of feature vector.
+
+|Field |Type |Description |
+|:---|:---|:---|
+|input
(required) |One of below
- string
- bytes
- string[*V*]
- int[*V*]
- double[*V*] |Input data for inference.
- "Nice weather." for a sentiment analysis.
- PNG file for an image transformation.
- ["a", "b"] for a text summarization.
- [1, 2] for a sales forecast.
- [0.9, 0.1] for mnist data. |
+|option |string| Option field. Must be json format. |
+
+The "option" field must be in JSON format. Any style is OK, but we have some reserved fields below.
+
+|Field |Type |Description |
+|:---|:---|:---|
+|suppress_log_inout |bool |True: do NOT print the input and output to the log message.
False (default): Print the input and output to the log message.
+
+##### Output format
+*M* is the number of classes. If your algorithm is a binary classifier, you set *M* to 1. If your algorithm is a multi-class classifier, you set *M* to the number of classes.
+
+|Field |Type |Description |
+|:---|:---|:---|
+|label
(required) |One of below
-string
-bytes
-string[*M*]
-int[*M*]
-double[*M*] |Result of inference.
-"positive" for a sentiment analysis.
-PNG file for an image transformation.
-["a", "b"] for a multi-class classification.
-[1, 2] for a multi-class classification.
-[0.9, 0.1] for a multi-class classification. |
+|score
(required) |One of below
-double
-double[*M*] |Score of result.
-0.98 for a binary classification.
-[0.9, 0.1] for a multi-class classification. |
+|option |string |Option field. Must be json format. |
+
+#### evaluate (TODO)
+Evaluating the precision, recall, and F1 value of your ML model. The definitions of input and output are described below.
+
+##### Input format
+|Field |Type |Description |
+|:---|:---|:---|
+|file
(required) |bytes |Data for performance check |
+
+##### Output format
+*N* is the number of evaluation data. *M* is the number of classes. If your algorithm is a binary classifier, you set *M* to 1. If your algorithm is a multi-class classifier, you set *M* to the number of classes.
+
+|Field |Type |Description |
+|:---|:---|:---|
+|num
(required)|int |Number of evaluation data. |
+|accuracy
(required) |double |Accuracy. |
+|precision
(required) |double[*N*][*M*] |Precision. |
+|recall
(required) |double[*N*][*M*] |Recall. |
+|fvalue
(required) |double[*N*][*M*] |F1 value. |
+
+### Edit server.py
+```
+$ vi server.py
+```
+
+Since `drucker_pb2_grpc` is automatically generated from `drucker.proto`, you don't need to care about it. You need to implement the interface class of `SystemLoggerInterface` and `ServiceLoggerInterface` if you customize the log output.
+
+### Edit start.sh
+```
+$ vi start.sh
+```
+
+Write the necessary script to boot your Drucker service. The minimum requirement is below.
+
+```
+pip install -r ./drucker-grpc-proto/requirements.txt
+python ./drucker-grpc-proto/run_codegen.py
+
+python server.py
+```
+
+### Run it!
+```
+$ sh start.sh
+```
+
+## Drucker on Kubernetes
+Drucker dashboard makes it easy to launch Drucker service on Kubernetes.
+
+You must read the following.
+
+1. https://github.com/drucker/drucker-parent/tree/master/docs/UsageDruckerOnKubernetes.md
+1. https://github.com/drucker/drucker-dashboard/README.md
\ No newline at end of file
diff --git a/__init__.py b/__init__.py
new file mode 100644
index 0000000..0a4027a
--- /dev/null
+++ b/__init__.py
@@ -0,0 +1,8 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+import os
+import sys
+
+sd = os.path.abspath(os.path.dirname(__file__))
+sys.path.append(sd)
diff --git a/core/__init__.py b/core/__init__.py
new file mode 100644
index 0000000..0a4027a
--- /dev/null
+++ b/core/__init__.py
@@ -0,0 +1,8 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+import os
+import sys
+
+sd = os.path.abspath(os.path.dirname(__file__))
+sys.path.append(sd)
diff --git a/core/drucker_dashboard_servicer.py b/core/drucker_dashboard_servicer.py
new file mode 100644
index 0000000..891cdc9
--- /dev/null
+++ b/core/drucker_dashboard_servicer.py
@@ -0,0 +1,180 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+import traceback
+
+import grpc
+import types
+import shutil
+import uuid
+from pathlib import Path
+
+import drucker_pb2
+import drucker_pb2_grpc
+
+from grpc._server import _Context
+from typing import Iterator
+
+from logger.logger_interface import SystemLoggerInterface
+from core.predict_interface import PredictInterface
+from utils.env_loader import SERVICE_NAME, SERVICE_LEVEL, APPLICATION_NAME, SERVICE_INFRA
+
+from models import db, get_model_path
+from models.model_assignment import ModelAssignment
+
+
+def error_handling(error_response):
+ """ Decorator for handling error
+
+ Apply following processing on Servicer methods
+ to handle errors.
+
+ - DB transaction decorating for Servicer class.
+ Confirm to call :func:``db.session.commit``
+ on success operation and :func:``db.session.rollback``
+ on fail.
+ - Error setup for gRPC errors
+ - Call :func:``on_error`` method (if defined) in the class
+ to postprocess something on error
+
+ Parameters
+ ----------
+ error_response
+ gRPC response instance on error
+
+ """
+
+ def _wrapper_maker(func):
+ def _wrapper(*args, **kwargs):
+ try:
+ return func(*args, **kwargs)
+ except Exception as error:
+ # DB rollback
+ db.session.rollback()
+
+ # gRPC
+ context = args[2]
+ context.set_code(grpc.StatusCode.UNKNOWN)
+ context.set_details(str(error))
+
+ servicer = args[0]
+ if hasattr(servicer, 'on_error'):
+ assert isinstance(servicer.on_error, types.MethodType), \
+ 'You must define on_error as method'
+ servicer.on_error(error)
+ return error_response
+ finally:
+ db.session.close()
+
+ return _wrapper
+
+ return _wrapper_maker
+
+
+class DruckerDashboardServicer(drucker_pb2_grpc.DruckerDashboardServicer):
+ """ gRPC servicer to manage environment
+
+ - Applications
+ Machine leagning applications
+ - Services
+ Unit to deploy machine learning models
+ (Corresponding to Service of K8S)
+ - Models
+ Machine learning model
+ """
+
+ def __init__(self, logger: SystemLoggerInterface, predictor: PredictInterface):
+ self.logger = logger
+ self.predictor = predictor
+
+ def on_error(self, error: Exception):
+ """ Postprocessing on error
+
+ For detail, see :func:``on_error``
+
+ Parameters
+ ----------
+ error : Exception
+ Error to be handled
+ """
+ self.logger.error(str(error))
+ self.logger.error(traceback.format_exc())
+
+ def ServiceInfo(self,
+ request: drucker_pb2.ServiceInfoRequest,
+ context: _Context
+ ) -> drucker_pb2.ServiceInfoResponse:
+ """ Get service info.
+ """
+ return drucker_pb2.ServiceInfoResponse(application_name=APPLICATION_NAME,
+ service_name=SERVICE_NAME,
+ service_level=SERVICE_LEVEL)
+
+ @error_handling(drucker_pb2.ModelResponse(status=0, message='Error: Uploading model file.'))
+ def UploadModel(self,
+ request_iterator: Iterator[drucker_pb2.UploadModelRequest],
+ context: _Context
+ ) -> drucker_pb2.ModelResponse:
+ """ Upload your latest ML model.
+ """
+ save_path = None
+ tmp_path = get_model_path(uuid.uuid4().hex)
+ Path(tmp_path).parent.mkdir(parents=True, exist_ok=True)
+ with open(tmp_path, 'wb') as f:
+ for request in request_iterator:
+ save_path = request.path
+ model_data = request.data
+ f.write(model_data)
+ f.close()
+ model_path = get_model_path(save_path)
+ Path(model_path).parent.mkdir(parents=True, exist_ok=True)
+ shutil.move(tmp_path, model_path)
+ return drucker_pb2.ModelResponse(status=1,
+ message='Success: Uploading model file.')
+
+ @error_handling(drucker_pb2.ModelResponse(status=0, message='Error: Switching model file.'))
+ def SwitchModel(self,
+ request: drucker_pb2.SwitchModelRequest,
+ context: _Context
+ ) -> drucker_pb2.ModelResponse:
+ """ Switch your ML model to run.
+ """
+ model_assignment = db.session.query(ModelAssignment).filter(ModelAssignment.service_name == SERVICE_NAME).one()
+ model_assignment.model_path = request.path
+ model_assignment.first_boot = False
+ db.session.commit()
+ model_path = get_model_path()
+
+ # :TODO: Use enum for SERVICE_INFRA
+ if SERVICE_INFRA == "kubernetes":
+ pass
+ elif SERVICE_INFRA == "default":
+ self.predictor.load_model(model_path)
+
+ return drucker_pb2.ModelResponse(status=1,
+ message='Success: Switching model file.')
+
+ def EvaluateModel(self,
+ request_iterator: Iterator[drucker_pb2.EvaluateModelRequest],
+ context: _Context
+ ) -> drucker_pb2.EvaluateModelResponse:
+ """ Evaluate your ML model.
+ :TODO: in detail.
+ """
+ try:
+ for evaluateModelRequest in request_iterator:
+ test_data = evaluateModelRequest.data
+ result = self.predictor.evaluate(test_data)
+ return drucker_pb2.EvaluateModelResponse(num=result.num,
+ accuracy=result.accuracy,
+ precision=result.precision,
+ recall=result.recall,
+ fvalue=result.fvalue)
+ except Exception as e:
+ self.logger.error(str(e))
+ self.logger.error(traceback.format_exc())
+ return drucker_pb2.EvaluateModelResponse(num=0,
+ accuracy=0,
+ precision=0,
+ recall=0,
+ fvalue=0)
diff --git a/core/drucker_worker_servicer.py b/core/drucker_worker_servicer.py
new file mode 100644
index 0000000..38f3337
--- /dev/null
+++ b/core/drucker_worker_servicer.py
@@ -0,0 +1,282 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+import drucker_pb2
+import drucker_pb2_grpc
+
+import json
+
+from enum import Enum
+from grpc._server import _Context
+from typing import Iterator, Union
+
+from logger.logger_interface import ServiceLoggerInterface
+from core.predict_interface import PredictResult, PredictInterface
+
+DruckerInput = Union[
+ drucker_pb2.StringInput, drucker_pb2.BytesInput,
+ drucker_pb2.ArrIntInput, drucker_pb2.ArrFloatInput, drucker_pb2.ArrStringInput]
+DruckerOutput = Union[
+ drucker_pb2.StringOutput, drucker_pb2.BytesOutput,
+ drucker_pb2.ArrIntOutput, drucker_pb2.ArrFloatOutput, drucker_pb2.ArrStringOutput]
+
+
+class DruckerWorkerServicer(drucker_pb2_grpc.DruckerWorkerServicer):
+ class Type(Enum):
+ STRING = 1
+ BYTES = 2
+ ARRAY_INT = 3
+ ARRAY_FLOAT = 4
+ ARRAY_STRING = 5
+
+ def __init__(self, logger: ServiceLoggerInterface, predictor: PredictInterface):
+ self.logger = logger
+ self.predictor = predictor
+
+ def Process(self,
+ request: DruckerInput,
+ response: DruckerOutput
+ ) -> DruckerOutput:
+
+ input = request.input
+ try:
+ ioption = json.loads(request.option.val)
+ except:
+ ioption = {request.option.val: request.option.val}
+
+ single_output = self.predictor.get_type_output() in [self.Type.STRING, self.Type.BYTES]
+ try:
+ result = self.predictor.predict(input, ioption)
+ except:
+ if single_output:
+ if isinstance(response, drucker_pb2.StringOutput):
+ label = "None"
+ elif isinstance(response, drucker_pb2.BytesOutput):
+ label = b'None'
+ else:
+ label = None
+ result = PredictResult(label=label, score=0.0, option={})
+ else:
+ if isinstance(response, drucker_pb2.ArrStringOutput):
+ label = ["None"]
+ elif isinstance(response, drucker_pb2.ArrIntOutput):
+ label = [0]
+ elif isinstance(response, drucker_pb2.ArrFloatOutput):
+ label = [0.0]
+ else:
+ label = None
+ result = PredictResult(label=label, score=[0.0], option={})
+ if single_output:
+ response.output = result.label
+ response.score = result.score
+ else:
+ response.output.extend(result.label)
+ response.score.extend(result.score)
+ response.option.val = result.option
+ self.logger.emit(request, response, ioption.get('suppress_log_inout', False))
+ return response
+
+ def Predict_String_String(self,
+ request: drucker_pb2.StringInput,
+ context: _Context
+ ) -> drucker_pb2.StringOutput:
+ response = drucker_pb2.StringOutput()
+ self.predictor.set_type(self.Type.STRING, self.Type.STRING)
+ return self.Process(request, response)
+
+ def Predict_String_Bytes(self,
+ request: drucker_pb2.StringInput,
+ context: _Context
+ ) -> drucker_pb2.BytesOutput:
+ response = drucker_pb2.BytesOutput()
+ self.predictor.set_type(self.Type.STRING, self.Type.BYTES)
+ yield self.Process(request, response)
+
+ def Predict_String_ArrInt(self,
+ request: drucker_pb2.StringInput,
+ context: _Context
+ ) -> drucker_pb2.ArrIntOutput:
+ response = drucker_pb2.ArrIntOutput()
+ self.predictor.set_type(self.Type.STRING, self.Type.ARRAY_INT)
+ return self.Process(request, response)
+
+ def Predict_String_ArrFloat(self,
+ request: drucker_pb2.StringInput,
+ context: _Context
+ ) -> drucker_pb2.ArrFloatOutput:
+ response = drucker_pb2.ArrFloatOutput()
+ self.predictor.set_type(self.Type.STRING, self.Type.ARRAY_FLOAT)
+ return self.Process(request, response)
+
+ def Predict_String_ArrString(self,
+ request: drucker_pb2.StringInput,
+ context: _Context
+ ) -> drucker_pb2.ArrStringOutput:
+ response = drucker_pb2.ArrStringOutput()
+ self.predictor.set_type(self.Type.STRING, self.Type.ARRAY_STRING)
+ return self.Process(request, response)
+
+ def Predict_Bytes_String(self,
+ request_iterator: Iterator[drucker_pb2.BytesInput],
+ context: _Context
+ ) -> drucker_pb2.StringOutput:
+ for request in request_iterator:
+ response = drucker_pb2.StringOutput()
+ self.predictor.set_type(self.Type.BYTES, self.Type.STRING)
+ return self.Process(request, response)
+
+ def Predict_Bytes_Bytes(self,
+ request_iterator: Iterator[drucker_pb2.BytesInput],
+ context: _Context
+ ) -> drucker_pb2.BytesOutput:
+ for request in request_iterator:
+ response = drucker_pb2.BytesOutput()
+ self.predictor.set_type(self.Type.BYTES, self.Type.BYTES)
+ yield self.Process(request, response)
+
+ def Predict_Bytes_ArrInt(self,
+ request_iterator: Iterator[drucker_pb2.BytesInput],
+ context: _Context
+ ) -> drucker_pb2.ArrIntOutput:
+ for request in request_iterator:
+ response = drucker_pb2.ArrIntOutput()
+ self.predictor.set_type(self.Type.BYTES, self.Type.ARRAY_INT)
+ return self.Process(request, response)
+
+ def Predict_Bytes_ArrFloat(self,
+ request_iterator: Iterator[drucker_pb2.BytesInput],
+ context: _Context
+ ) -> drucker_pb2.ArrFloatOutput:
+ for request in request_iterator:
+ response = drucker_pb2.ArrFloatOutput()
+ self.predictor.set_type(self.Type.BYTES, self.Type.ARRAY_FLOAT)
+ return self.Process(request, response)
+
+ def Predict_Bytes_ArrString(self,
+ request_iterator: Iterator[drucker_pb2.BytesInput],
+ context: _Context
+ ) -> drucker_pb2.ArrStringOutput:
+ for request in request_iterator:
+ response = drucker_pb2.ArrStringOutput()
+ self.predictor.set_type(self.Type.BYTES, self.Type.ARRAY_STRING)
+ return self.Process(request, response)
+
+ def Predict_ArrInt_String(self,
+ request: drucker_pb2.ArrIntInput,
+ context: _Context
+ ) -> drucker_pb2.StringOutput:
+ response = drucker_pb2.StringOutput()
+ self.predictor.set_type(self.Type.ARRAY_INT, self.Type.STRING)
+ return self.Process(request, response)
+
+ def Predict_ArrInt_Bytes(self,
+ request: drucker_pb2.ArrIntInput,
+ context: _Context
+ ) -> drucker_pb2.BytesOutput:
+ response = drucker_pb2.BytesOutput()
+ self.predictor.set_type(self.Type.ARRAY_INT, self.Type.BYTES)
+ yield self.Process(request, response)
+
+ def Predict_ArrInt_ArrInt(self,
+ request: drucker_pb2.ArrIntInput,
+ context: _Context
+ ) -> drucker_pb2.ArrIntOutput:
+ response = drucker_pb2.ArrIntOutput()
+ self.predictor.set_type(self.Type.ARRAY_INT, self.Type.ARRAY_INT)
+ return self.Process(request, response)
+
+ def Predict_ArrInt_ArrFloat(self,
+ request: drucker_pb2.ArrIntInput,
+ context: _Context
+ ) -> drucker_pb2.ArrFloatOutput:
+ response = drucker_pb2.ArrFloatOutput()
+ self.predictor.set_type(self.Type.ARRAY_INT, self.Type.ARRAY_FLOAT)
+ return self.Process(request, response)
+
+ def Predict_ArrInt_ArrString(self,
+ request: drucker_pb2.ArrIntInput,
+ context: _Context
+ ) -> drucker_pb2.ArrStringOutput:
+ response = drucker_pb2.ArrStringOutput()
+ self.predictor.set_type(self.Type.ARRAY_INT, self.Type.ARRAY_STRING)
+ return self.Process(request, response)
+
+ def Predict_ArrFloat_String(self,
+ request: drucker_pb2.ArrFloatInput,
+ context: _Context
+ ) -> drucker_pb2.StringOutput:
+ response = drucker_pb2.StringOutput()
+ self.predictor.set_type(self.Type.ARRAY_FLOAT, self.Type.STRING)
+ return self.Process(request, response)
+
+ def Predict_ArrFloat_Bytes(self,
+ request: drucker_pb2.ArrFloatInput,
+ context: _Context
+ ) -> drucker_pb2.BytesOutput:
+ response = drucker_pb2.BytesOutput()
+ self.predictor.set_type(self.Type.ARRAY_FLOAT, self.Type.BYTES)
+ yield self.Process(request, response)
+
+ def Predict_ArrFloat_ArrInt(self,
+ request: drucker_pb2.ArrFloatInput,
+ context: _Context
+ ) -> drucker_pb2.ArrIntOutput:
+ response = drucker_pb2.ArrIntOutput()
+ self.predictor.set_type(self.Type.ARRAY_FLOAT, self.Type.ARRAY_INT)
+ return self.Process(request, response)
+
+ def Predict_ArrFloat_ArrFloat(self,
+ request: drucker_pb2.ArrFloatInput,
+ context: _Context
+ ) -> drucker_pb2.ArrFloatOutput:
+ response = drucker_pb2.ArrFloatOutput()
+ self.predictor.set_type(self.Type.ARRAY_FLOAT, self.Type.ARRAY_FLOAT)
+ return self.Process(request, response)
+
+ def Predict_ArrFloat_ArrString(self,
+ request: drucker_pb2.ArrFloatInput,
+ context: _Context
+ ) -> drucker_pb2.ArrStringOutput:
+ response = drucker_pb2.ArrStringOutput()
+ self.predictor.set_type(self.Type.ARRAY_FLOAT, self.Type.ARRAY_STRING)
+ return self.Process(request, response)
+
+ def Predict_ArrString_String(self,
+ request: drucker_pb2.ArrStringInput,
+ context: _Context
+ ) -> drucker_pb2.StringOutput:
+ response = drucker_pb2.StringOutput()
+ self.predictor.set_type(self.Type.ARRAY_STRING, self.Type.STRING)
+ return self.Process(request, response)
+
+ def Predict_ArrString_Bytes(self,
+ request: drucker_pb2.ArrStringInput,
+ context: _Context
+ ) -> drucker_pb2.BytesOutput:
+ response = drucker_pb2.BytesOutput()
+ self.predictor.set_type(self.Type.ARRAY_STRING, self.Type.BYTES)
+ yield self.Process(request, response)
+
+ def Predict_ArrString_ArrInt(self,
+ request: drucker_pb2.ArrStringInput,
+ context: _Context
+ ) -> drucker_pb2.ArrIntOutput:
+ response = drucker_pb2.ArrIntOutput()
+ self.predictor.set_type(self.Type.ARRAY_STRING, self.Type.ARRAY_INT)
+ return self.Process(request, response)
+
+ def Predict_ArrString_ArrFloat(self,
+ request: drucker_pb2.ArrStringInput,
+ context: _Context
+ ) -> drucker_pb2.ArrFloatOutput:
+ response = drucker_pb2.ArrFloatOutput()
+ self.predictor.set_type(self.Type.ARRAY_STRING, self.Type.ARRAY_FLOAT)
+ return self.Process(request, response)
+
+ def Predict_ArrString_ArrString(self,
+ request: drucker_pb2.ArrStringInput,
+ context: _Context
+ ) -> drucker_pb2.ArrStringOutput:
+ response = drucker_pb2.ArrStringOutput()
+ self.predictor.set_type(self.Type.ARRAY_STRING, self.Type.ARRAY_STRING)
+ return self.Process(request, response)
diff --git a/core/predict_interface.py b/core/predict_interface.py
new file mode 100644
index 0000000..3be9a6f
--- /dev/null
+++ b/core/predict_interface.py
@@ -0,0 +1,64 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+import json
+
+from abc import ABCMeta, abstractmethod
+from enum import Enum
+from typing import Union, List
+
+PredictLabel = Union[str, bytes, List[str], List[int], List[float]]
+PredictScore = Union[float, List[float]]
+
+
+class PredictResult:
+ def __init__(self, label: PredictLabel, score: PredictScore, option: dict = None):
+ self.label = label
+ self.score = score
+ self.option = json.dumps(option) if option is not None else '{}'
+
+
+class EvaluateResult:
+ def __init__(self, num: int = None, accuracy: float = None,
+ precision: list = None, recall: list = None,
+ fvalue: list = None):
+ if num is None:
+ self.num = 0
+ self.accuracy = 0.0
+ self.precision = [0]
+ self.recall = [0]
+ self.fvalue = [0]
+ else:
+ self.num = num
+ self.accuracy = accuracy
+ self.precision = precision
+ self.recall = recall
+ self.fvalue = fvalue
+
+
+class PredictInterface(metaclass=ABCMeta):
+ def __init__(self):
+ self.type_input = None
+ self.type_output = None
+
+ def set_type(self, type_input: Enum, type_output: Enum) -> None:
+ self.type_input = type_input
+ self.type_output = type_output
+
+ def get_type_input(self) -> Enum:
+ return self.type_input
+
+ def get_type_output(self) -> Enum:
+ return self.type_output
+
+ @abstractmethod
+ def load_model(self, model_path: str) -> None:
+ raise NotImplemented()
+
+ @abstractmethod
+ def predict(self, input: PredictLabel, option: dict = None) -> PredictResult:
+ raise NotImplemented()
+
+ @abstractmethod
+ def evaluate(self, file: bytes) -> EvaluateResult:
+ raise NotImplemented()
diff --git a/logger/logger_fluent.py b/logger/logger_fluent.py
new file mode 100644
index 0000000..d9ec8b7
--- /dev/null
+++ b/logger/logger_fluent.py
@@ -0,0 +1,129 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+import logging
+import sys
+import time
+from socket import gethostname
+
+from fluent import handler, sender
+
+from logger.logger_interface import SystemLoggerInterface, ServiceLoggerInterface
+from utils.env_loader import ServiceEnvType, SERVICE_LEVEL_ENUM, APPLICATION_NAME
+
+
+class SystemLogger(SystemLoggerInterface):
+
+ def __init__(self, logger_name: str = 'drucker',
+ log_level: int = logging.NOTSET, app_name: str = APPLICATION_NAME,
+ app_env: ServiceEnvType = ServiceEnvType.DEVELOPMENT) -> None:
+ """
+ constructor
+ :param logger_name:
+ :param log_level:
+ :param app_name:
+ :param app_env:
+ """
+ super().__init__()
+ self.log = logging.getLogger(logger_name)
+ self.log.setLevel(log_level)
+
+ custom_format = {
+ 'host': gethostname(),
+ 'short_message': '%(message)s',
+ 'timestamp': '%(created)d.%(msecs)d',
+ 'level': '%(loglevel)d',
+ 'service': 'drucker',
+ 'ml_service': app_name,
+ 'service_level': app_env
+ }
+
+ fluent_handler = handler.FluentHandler('drucker')
+ formatter = handler.FluentRecordFormatter(custom_format)
+ fluent_handler.setFormatter(formatter)
+ fluent_handler.setLevel(log_level)
+ self.log.addHandler(fluent_handler)
+
+ def exception(self, message: str) -> None:
+ """
+ emits exception to log
+ :param message: error message
+ """
+ self.log.error(message, exc_info=sys.exc_info(), stack_info=True, extra={'loglevel': 3})
+
+ def error(self, message: str) -> None:
+ """
+ emits error log
+ :param message: log
+ """
+ self.log.error(message, extra={'loglevel': 3})
+
+ def debug(self, message: str) -> None:
+ """
+ emits debug log
+ :param message: log
+ """
+ self.log.debug(message, extra={'loglevel': 7})
+
+ def info(self, message: str) -> None:
+ """
+ emits info log
+ :param message: log
+ """
+ self.log.info(message, extra={'loglevel': 6})
+
+ def warn(self, message: str) -> None:
+ """
+ emits warn log
+ :param message: log
+ """
+ self.log.warning(message, extra={'loglevel': 4})
+
+
+class ServiceLogger(ServiceLoggerInterface):
+
+ def __init__(self, app_name: str = APPLICATION_NAME,
+ app_env: ServiceEnvType = ServiceEnvType.DEVELOPMENT):
+ """
+ constructor
+ :param app_name:
+ :param app_env:
+ """
+ super().__init__()
+ self.logger = sender.FluentSender('drucker_service')
+ self.ml_service = app_name
+ self.service_level = app_env
+
+ def emit(self, request, response, suppress_log_inout: bool = False) -> None:
+ """
+ emits service log
+ :param request:
+ :param response:
+ :param suppress_log_inout:
+ :return:
+ """
+ try:
+ if suppress_log_inout:
+ ml_input = ''
+ ml_output = ''
+ else:
+ ml_input = super().to_str_from_request(request)
+ ml_output = super().to_str_from_response(response)
+
+ self.logger.emit(None, {
+ 'host': gethostname(),
+ 'short_message': 'prediction result.',
+ 'timestamp': int(time.time() * 1000) / 1000,
+ 'level': logging.INFO,
+ 'service': 'drucker',
+ 'ml_service': self.ml_service,
+ 'service_level': self.service_level,
+ 'ml_input': ml_input,
+ 'ml_output': ml_output
+ })
+ except:
+ try:
+ SystemLogger(logger_name="ServiceLogger", app_name=APPLICATION_NAME,
+ app_env=SERVICE_LEVEL_ENUM).exception("can't write log")
+ except:
+ pass
diff --git a/logger/logger_interface.py b/logger/logger_interface.py
new file mode 100644
index 0000000..68bf6ee
--- /dev/null
+++ b/logger/logger_interface.py
@@ -0,0 +1,53 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+import json
+from abc import ABCMeta, abstractmethod
+
+
+class SystemLoggerInterface(metaclass=ABCMeta):
+ @abstractmethod
+ def exception(self, message: str) -> None:
+ raise NotImplemented()
+
+ @abstractmethod
+ def error(self, message: str) -> None:
+ raise NotImplemented()
+
+ @abstractmethod
+ def debug(self, message: str) -> None:
+ raise NotImplemented()
+
+ @abstractmethod
+ def info(self, message: str) -> None:
+ raise NotImplemented()
+
+ @abstractmethod
+ def warn(self, message: str) -> None:
+ raise NotImplemented()
+
+
+class ServiceLoggerInterface(metaclass=ABCMeta):
+ @abstractmethod
+ def emit(self, request, response, suppress_log_inout: bool = False) -> None:
+ raise NotImplemented()
+
+ # noinspection PyMethodMayBeStatic
+ def to_str_from_request(self, request) -> str:
+ tmp = {'option': request.option.val}
+ if isinstance(request.input, (str, bytes)):
+ tmp['input'] = str(request.input)
+ else:
+ tmp['input'] = list(request.input)
+ return json.dumps(tmp)
+
+ # noinspection PyMethodMayBeStatic
+ def to_str_from_response(self, response) -> str:
+ tmp = {'option': response.option.val}
+ if isinstance(response.output, (str, bytes)):
+ tmp['output'] = str(response.output)
+ tmp['score'] = response.score
+ else:
+ tmp['output'] = list(response.output)
+ tmp['score'] = list(response.score)
+ return json.dumps(tmp)
diff --git a/logger/logger_jsonlogger.py b/logger/logger_jsonlogger.py
new file mode 100644
index 0000000..c338c13
--- /dev/null
+++ b/logger/logger_jsonlogger.py
@@ -0,0 +1,160 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+import logging
+import sys
+import time
+from socket import gethostname
+
+from pythonjsonlogger import jsonlogger
+
+from logger.logger_interface import SystemLoggerInterface, ServiceLoggerInterface
+from utils.env_loader import ServiceEnvType, SERVICE_LEVEL_ENUM, APPLICATION_NAME
+
+
+class SystemLogger(SystemLoggerInterface):
+    class JsonFormatter(jsonlogger.JsonFormatter):
+        def parse(self):
+            return [
+                'host',
+                'short_message',
+                'timestamp',
+                'level',
+                'service',
+                'ml_service',
+                'service_level',
+            ]
+
+        def add_fields(self, log_record, record, message_dict):
+            super().add_fields(log_record, record, message_dict)
+            log_record['host'] = gethostname()
+            log_record['timestamp'] = int(time.time() * 1000) / 1000  # epoch seconds truncated to millisecond precision
+            log_record['service'] = 'drucker'
+
+    def __init__(self, logger_name: str = 'drucker',
+                 log_level: int = logging.NOTSET, app_name: str = APPLICATION_NAME,
+                 app_env: ServiceEnvType = ServiceEnvType.DEVELOPMENT) -> None:
+        """
+        constructor
+        :param logger_name:
+        :param log_level:
+        :param app_name:
+        :param app_env:
+        """
+        super().__init__()
+        self.log = logging.getLogger(logger_name)
+        handler = logging.StreamHandler()
+        formatter = self.JsonFormatter()
+        handler.setFormatter(formatter)
+        if not self.log.handlers: self.log.addHandler(handler)  # guard: getLogger returns a shared instance, so repeated construction must not stack duplicate handlers (each would re-emit every record)
+        self.log.setLevel(log_level)
+        self.ml_service = app_name
+        self.service_level = app_env
+
+    def exception(self, message: str) -> None:
+        """
+        emits exception to log
+        :param message: error message
+        """
+        self.log.error(message, exc_info=sys.exc_info(), stack_info=True,
+                       extra={'short_message': message, 'level': 3,  # 3 = syslog "error" severity
+                              'ml_service': self.ml_service,
+                              'service_level': self.service_level})
+
+    def error(self, message: str) -> None:
+        """
+        emits error log
+        :param message: log
+        """
+        self.log.error(message, extra={'short_message': message, 'level': 3,
+                                       'ml_service': self.ml_service,
+                                       'service_level': self.service_level})
+
+    def debug(self, message: str) -> None:
+        """
+        emits debug log
+        :param message: log
+        """
+        self.log.debug(message, extra={'short_message': message, 'level': 7,  # 7 = syslog "debug" severity
+                                       'ml_service': self.ml_service,
+                                       'service_level': self.service_level})
+
+    def info(self, message: str) -> None:
+        """
+        emits info log
+        :param message: log
+        """
+        self.log.info(message, extra={'short_message': message, 'level': 6,  # 6 = syslog "informational" severity
+                                      'ml_service': self.ml_service,
+                                      'service_level': self.service_level})
+
+    def warn(self, message: str) -> None:
+        """
+        emits warn log
+        :param message: log
+        """
+        self.log.warning(message, extra={'short_message': message, 'level': 4,  # 4 = syslog "warning" severity
+                                         'ml_service': self.ml_service,
+                                         'service_level': self.service_level})
+
+
+class ServiceLogger(ServiceLoggerInterface):
+    class JsonFormatter(jsonlogger.JsonFormatter):
+        def parse(self):
+            return [
+                'host',
+                'short_message',
+                'timestamp',
+                'level',
+                'service',
+                'ml_service',
+                'service_level',
+                'ml_input',
+                'ml_output',
+            ]
+
+        def add_fields(self, log_record, record, message_dict):
+            super().add_fields(log_record, record, message_dict)
+            log_record['host'] = gethostname()
+            log_record['timestamp'] = int(time.time() * 1000) / 1000  # epoch seconds truncated to millisecond precision
+            log_record['service'] = 'drucker'
+
+    def __init__(self, app_name: str = APPLICATION_NAME,
+                 app_env: ServiceEnvType = ServiceEnvType.DEVELOPMENT):
+        """
+        constructor
+        """
+        super().__init__()
+        self.log = logging.getLogger("drucker.service")
+        handler = logging.StreamHandler()
+        formatter = self.JsonFormatter()
+        handler.setFormatter(formatter)
+        if not self.log.handlers: self.log.addHandler(handler)  # guard: getLogger returns a shared instance, so repeated construction must not stack duplicate handlers
+        self.log.setLevel(logging.DEBUG)
+        self.ml_service = app_name
+        self.service_level = app_env
+
+    def emit(self, request, response, suppress_log_inout: bool = False) -> None:
+        """
+        emits service log
+        """
+        try:
+            if suppress_log_inout:
+                ml_input = ''
+                ml_output = ''
+            else:
+                ml_input = super().to_str_from_request(request)
+                ml_output = super().to_str_from_response(response)
+
+            message = "prediction result."
+            self.log.info(message, extra={'short_message': message,
+                                          'level': 6, 'ml_service': self.ml_service,
+                                          'service_level': self.service_level,
+                                          'ml_input': ml_input,
+                                          'ml_output': ml_output})
+        except Exception:
+            try:
+                SystemLogger(logger_name="ServiceLogger", app_name=APPLICATION_NAME,
+                             app_env=SERVICE_LEVEL_ENUM).exception("can't write log")
+            except Exception:  # narrowed from a bare `except:` (matches the outer handler); last-resort: never let logging kill the request
+                pass
diff --git a/models/__init__.py b/models/__init__.py
new file mode 100644
index 0000000..c5b3894
--- /dev/null
+++ b/models/__init__.py
@@ -0,0 +1,101 @@
+from sqlalchemy import create_engine
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import sessionmaker, scoped_session
+from sqlalchemy.sql import exists
+from utils.env_loader import (
+ TEST_MODE,
+ DB_MODE,
+ DB_MYSQL_HOST,
+ DB_MYSQL_PORT,
+ DB_MYSQL_DBNAME,
+ DB_MYSQL_USER,
+ DB_MYSQL_PASSWORD,
+ SERVICE_NAME,
+ FILE_MODEL,
+ DIR_MODEL,
+ APPLICATION_NAME
+)
+
+
+def db_url():
+    """ Get full URL for DB
+
+    :TODO: Make the URL template and encoding configurable
+    :TODO: Use Enum for DB_MODE
+    :TODO: Use an appropriate "Exception"
+    """
+    if DB_MODE == "sqlite":
+        db_name = "db.test.sqlite3" if TEST_MODE else "db.sqlite3"  # separate file keeps test data away from real data
+        url = f'sqlite:///{db_name}'
+    elif DB_MODE == "mysql":
+        host = DB_MYSQL_HOST
+        port = DB_MYSQL_PORT
+        db_name = "test_"+DB_MYSQL_DBNAME if TEST_MODE else DB_MYSQL_DBNAME  # test runs use a "test_"-prefixed schema
+        user = DB_MYSQL_USER
+        password = DB_MYSQL_PASSWORD
+        url = f'mysql+pymysql://{user}:{password}@{host}:{port}/{db_name}?charset=utf8'
+    else:
+        raise Exception("Invalid DB_MODE.")
+    return url
+
+
+class DAO(object):
+    """ Data Access Object
+
+    This implementation is inspired by Flask-SQLAlchemy's one.
+    """
+
+    def __init__(self):
+        self.engine = create_engine(
+            db_url(),
+            encoding='utf-8',
+            echo=True  # NOTE(review): echoes every SQL statement to stdout — confirm this is wanted outside development
+        )
+
+        self.session = scoped_session(  # thread-local session registry: each thread gets its own Session bound to the engine
+            sessionmaker(
+                autocommit=False,
+                autoflush=False,
+                bind=self.engine
+            )
+        )
+
+        self.ModelBase = declarative_base()  # base class for all ORM models of this DAO
+
+
+db = DAO()  # module-level singleton: importing `models` connects to the DB as a side effect
+
+from models.model_assignment import ModelAssignment  # imported only after `db` exists; model_assignment imports `db` back from this module
+
+if TEST_MODE:
+    db.ModelBase.metadata.drop_all(db.engine)  # start every test run from an empty schema
+db.ModelBase.metadata.create_all(db.engine)
+
+if not db.session.query(exists().where(ModelAssignment.service_name == SERVICE_NAME)).scalar():
+    model_assignment = ModelAssignment()  # seed this service's assignment row on the very first boot
+    model_assignment.service_name = SERVICE_NAME
+    model_assignment.model_path = FILE_MODEL
+    model_assignment.first_boot = True
+    db.session.add(model_assignment)
+    db.session.commit()
+
+
+def get_model_path(model_path: str = None):  # resolve "{DIR_MODEL}/{APPLICATION_NAME}/{file}", preferring the DB assignment
+    if model_path is None:
+        model_path = FILE_MODEL  # fall back to the configured default model file
+
+    result = db.session.query(ModelAssignment). \
+        filter(ModelAssignment.service_name == SERVICE_NAME). \
+        one_or_none()
+
+    if result is not None:
+        model_path = result.model_path  # a DB assignment overrides both the argument and the default
+    return f"{DIR_MODEL}/{APPLICATION_NAME}/{model_path}"  # f-string for consistency with db_url() above
+
+
+SERVICE_FIRST_BOOT = True
+try:
+    model_assignment = db.session.query(ModelAssignment).filter(ModelAssignment.service_name == SERVICE_NAME).one()
+    SERVICE_FIRST_BOOT = model_assignment.first_boot
+except Exception:  # narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are not swallowed; a missing row just means first boot
+    pass
diff --git a/models/model_assignment.py b/models/model_assignment.py
new file mode 100644
index 0000000..345f16d
--- /dev/null
+++ b/models/model_assignment.py
@@ -0,0 +1,25 @@
+from sqlalchemy import (
+ Column, String, Boolean, UniqueConstraint
+)
+
+from models import db
+
+
+class ModelAssignment(db.ModelBase):
+    __tablename__ = 'model_assignments'
+    __table_args__ = (
+        UniqueConstraint('service_name'),  # redundant with the primary key, but makes the one-row-per-service intent explicit
+        {'mysql_engine': 'InnoDB'}
+    )
+
+    service_name = Column(String(512), primary_key=True)  # one assignment row per service
+    model_path = Column(String(512), nullable=False)  # model file name, joined under DIR_MODEL/APPLICATION_NAME by get_model_path()
+    first_boot = Column(Boolean(), nullable=False)  # seeded True in models/__init__.py; cleared elsewhere — TODO confirm where
+
+    @property
+    def serialize(self):  # row as a plain dict (e.g. for JSON output)
+        return {
+            'service_name': self.service_name,
+            'model_path': self.model_path,
+            'first_boot': self.first_boot
+        }
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..d1b09b9
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,7 @@
+fluent-logger==0.9.3
+python-json-logger==0.1.9
+grpcio==1.13.0
+grpcio-tools==1.13.0
+PyMySQL==0.8.0
+SQLAlchemy==1.2.7
+PyYAML==3.12
diff --git a/template/predict.py b/template/predict.py
new file mode 100644
index 0000000..f917176
--- /dev/null
+++ b/template/predict.py
@@ -0,0 +1,128 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+import traceback
+import csv
+import os
+import io
+
+from enum import Enum
+
+from drucker.logger.logger_jsonlogger import SystemLogger
+from drucker.core.predict_interface import PredictInterface, PredictLabel, PredictResult, EvaluateResult
+from drucker.utils.env_loader import SERVICE_LEVEL_ENUM, APPLICATION_NAME
+from drucker.models import get_model_path, SERVICE_FIRST_BOOT
+
+### Expansion start. You can add your necessity libraries.
+import numpy as np
+from sklearn.metrics import accuracy_score, precision_recall_fscore_support
+
+import zipfile
+from sklearn.externals import joblib
+
+
+def joblib_load_from_zip(zip_name: str, file_name: str):  # load a joblib-serialized model stored inside a zip archive
+    with zipfile.ZipFile(zip_name, 'r') as zf:
+        with zf.open(file_name, 'r') as zipmodel:
+            return joblib.load(io.BufferedReader(io.BytesIO(zipmodel.read())))  # read fully into memory; no extraction to disk
+### Expansion end.
+
+
+class Predict(PredictInterface):
+    def __init__(self):
+        super().__init__()
+        self.predictor = None  # set (or reset to None on failure) by load_model below
+        self.logger = SystemLogger(logger_name="drucker_predict", app_name=APPLICATION_NAME, app_env=SERVICE_LEVEL_ENUM)
+        self.load_model(get_model_path())  # model is loaded eagerly at construction time
+
+    def set_type(self, type_input: Enum, type_output: Enum) -> None:
+        super().set_type(type_input, type_output)
+
+    def get_type_input(self) -> Enum:
+        return super().get_type_input()
+
+    def get_type_output(self) -> Enum:
+        return super().get_type_output()
+
+    def load_model(self, model_path: str = None) -> None:
+        """ override
+        Load ML model.
+
+        :param model_path:
+        :return:
+        """
+        assert model_path is not None, \
+            'Please specify your ML model path'
+        try:
+            # FIXME: This is an example. Implement HERE!
+            self.predictor = joblib.load(model_path)
+            # FIXME: This is Another example. You can use archived file if your algorithm requires some files.
+            # MODEL_NAME = "20180206"
+            # zip_name = MODEL_HOME + MODEL_NAME + ".zip"
+            # file_name = MODEL_NAME+'/default.model'
+            # self.predictor = joblib_load_from_zip(zip_name, file_name)
+
+        except Exception as e:
+            self.logger.error(str(e))
+            self.logger.error(traceback.format_exc())
+            self.predictor = None  # leave the worker without a model rather than crash on first boot
+            if not SERVICE_FIRST_BOOT:
+                # noinspection PyProtectedMember
+                os._exit(-1)  # hard-exit: a non-first boot without a loadable model is fatal
+
+    def predict(self, input: PredictLabel, option: dict = None) -> PredictResult:
+        """ override
+        Predict.
+
+        :param input: Input data. string/bytes/arr[int]/arr[float]/arr[string]
+        :param option: Miscellaneous. dict
+        :return:
+            output: Result. string/bytes/arr[int]/arr[float]/arr[string]
+            score: Score. float/arr[float]
+            option: Miscellaneous. dict
+        """
+        try:
+            # FIXME: This is an example. Implement HERE!
+            if option is None:
+                option = {}
+            label_predict = self.predictor.predict(
+                np.array([input], dtype='float64')).tolist()  # assumes `input` is a flat numeric feature vector — TODO confirm
+            return PredictResult(label_predict, [1] * len(label_predict), option={})  # NOTE(review): dummy score 1 per label, and the normalized `option` is discarded — confirm intended
+        except Exception as e:
+            self.logger.error(str(e))
+            self.logger.error(traceback.format_exc())
+            raise e
+
+    def evaluate(self, file: bytes) -> EvaluateResult:
+        """ override
+        Evaluate.
+        :TODO: in detail.
+
+        :param file: Evaluation data file. bytes
+        :return:
+            num: Number of data. int
+            accuracy: Accuracy. float
+            precision: Precision. arr[float]
+            recall: Recall. arr[float]
+            fvalue: F1 value. arr[float]
+        """
+        try:
+            # FIXME: This is an example. Implement HERE!
+            f = io.StringIO(file.decode("utf-8"))  # evaluation data: UTF-8 CSV, gold label in column 0, features after
+            reader = csv.reader(f, delimiter=",")
+            num = 0
+            label_gold = []
+            label_predict = []
+            for row in reader:
+                num += 1
+                label_gold.append(int(row[0]))
+                result = self.predict(row[1:], option={})
+                label_predict.append(result.label)
+
+            accuracy = accuracy_score(label_gold, label_predict)
+            p_r_f = precision_recall_fscore_support(label_gold, label_predict)
+            return EvaluateResult(num, accuracy, p_r_f[0].tolist(), p_r_f[1].tolist(), p_r_f[2].tolist())
+        except Exception as e:
+            self.logger.error(str(e))
+            self.logger.error(traceback.format_exc())
+            return EvaluateResult()  # best-effort: an empty result signals evaluation failure to the caller
diff --git a/template/server.py b/template/server.py
new file mode 100644
index 0000000..77427c5
--- /dev/null
+++ b/template/server.py
@@ -0,0 +1,41 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+from concurrent import futures
+import grpc
+import drucker_pb2_grpc
+import time
+
+from drucker.core.drucker_dashboard_servicer import DruckerDashboardServicer
+from drucker.core.drucker_worker_servicer import DruckerWorkerServicer
+from drucker.logger.logger_jsonlogger import SystemLogger, ServiceLogger
+from predict import Predict
+from drucker.utils.env_loader import SERVICE_LEVEL_ENUM, APPLICATION_NAME, SERVICE_PORT
+
+_ONE_DAY_IN_SECONDS = 60 * 60 * 24
+
+
+def serve():  # build and run the gRPC worker until interrupted
+    system_logger = SystemLogger(logger_name="drucker", app_name=APPLICATION_NAME, app_env=SERVICE_LEVEL_ENUM)
+    service_logger = ServiceLogger(app_name=APPLICATION_NAME, app_env=SERVICE_LEVEL_ENUM)
+    predictor = Predict()  # loads the ML model in its constructor
+    system_logger.info("Wake-up drucker worker.")
+
+    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
+
+    drucker_pb2_grpc.add_DruckerDashboardServicer_to_server(
+        DruckerDashboardServicer(logger=system_logger, predictor=predictor), server)
+    drucker_pb2_grpc.add_DruckerWorkerServicer_to_server(
+        DruckerWorkerServicer(logger=service_logger, predictor=predictor), server)
+    server.add_insecure_port("[::]:{0}".format(SERVICE_PORT))  # NOTE(review): no TLS — confirm acceptable for the deployment
+    server.start()
+    try:
+        while True:  # grpc server.start() does not block; park the main thread
+            time.sleep(_ONE_DAY_IN_SECONDS)
+    except KeyboardInterrupt:
+        system_logger.info("Shutdown drucker worker.")
+        server.stop(0)
+
+
+if __name__ == '__main__':
+    serve()
diff --git a/template/settings.yml b/template/settings.yml
new file mode 100644
index 0000000..9172486
--- /dev/null
+++ b/template/settings.yml
@@ -0,0 +1,54 @@
+# (Mainly) For non-kubernetes users.
+# Define the parameters below.
+
+# This must be unique.
+# You can also set the environment variable "DRUCKER_APPLICATION_NAME".
+# Priority is...
+# DRUCKER_APPLICATION_NAME > app.name
+app.name: drucker-sample
+
+# You can also set the environment variable "DRUCKER_SERVICE_PORT".
+# Priority is...
+# DRUCKER_SERVICE_PORT > app.port
+app.port: 5000
+
+# This must be unique.
+# You can also set the environment variable "DRUCKER_SERVICE_NAME".
+# Priority is...
+# DRUCKER_SERVICE_NAME > app.service.name
+app.service.name: dev-001
+
+# This must be [development/beta/staging/production]
+# You can also set the environment variable "DRUCKER_SERVICE_LEVEL".
+# Priority is...
+# DRUCKER_SERVICE_LEVEL > app.service.level
+app.service.level: development
+
+# ML model
+# Put your model file under "{app.modeldir}/{app.name}/{app.modelfile}".
+# If you use Drucker-manager, {app.modelfile} will be set by Drucker-manager.
+# You can use the environment variables,
+# Priority is...
+# DRUCKER_SERVICE_MODEL_DIR > app.modeldir
+# DB entry > DRUCKER_SERVICE_MODEL_FILE > app.modelfile
+app.modeldir: ./model
+app.modelfile: default.model
+
+# DB
+# "use.db" must be [sqlite(default)/mysql].
+# If you use Drucker on Kubernetes, you must set "use.db" to "mysql" and prepare MySQL server.
+# You don't need to setup DB. Drucker will do it.
+# You can use the environment variables,
+# Priority is...
+# DRUCKER_DB_MODE > use.db
+# DRUCKER_DB_MYSQL_HOST > db.mysql.host
+# DRUCKER_DB_MYSQL_PORT > db.mysql.port
+# DRUCKER_DB_MYSQL_DBNAME > db.mysql.dbname
+# DRUCKER_DB_MYSQL_USER > db.mysql.user
+# DRUCKER_DB_MYSQL_PASSWORD > db.mysql.password
+use.db: sqlite
+db.mysql.host: localhost
+db.mysql.port: 3306
+db.mysql.dbname: assignment
+db.mysql.user: user
+db.mysql.password: pass
diff --git a/template/start.sh b/template/start.sh
new file mode 100755
index 0000000..f0db6bd
--- /dev/null
+++ b/template/start.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env bash
+
+ECHO_PREFIX="[drucker example]: "
+
+set -e
+set -u
+
+echo "$ECHO_PREFIX Start.."
+
+pip install -r ./drucker-grpc-proto/requirements.txt
+python ./drucker-grpc-proto/run_codegen.py
+
+pip install -r ./drucker/requirements.txt
+
+pip install -r requirements.txt
+python server.py
diff --git a/utils/env_loader.py b/utils/env_loader.py
new file mode 100644
index 0000000..4bf1239
--- /dev/null
+++ b/utils/env_loader.py
@@ -0,0 +1,50 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+from enum import Enum
+import yaml
+import os
+
+
+class ServiceEnvType(Enum):
+    DEVELOPMENT = 'development'
+    BETA = 'beta'
+    STAGING = 'staging'
+    PRODUCTION = 'production'
+
+    @classmethod
+    def to_Enum(cls, istr: str):
+        if cls.DEVELOPMENT.value == istr:
+            return cls.DEVELOPMENT
+        elif cls.BETA.value == istr:
+            return cls.BETA
+        elif cls.STAGING.value == istr:
+            return cls.STAGING
+        elif cls.PRODUCTION.value == istr:  # bugfix: `.value` was missing, so an enum member was compared to a str (always False) and 'production' mapped to None
+            return cls.PRODUCTION
+        else:
+            return None
+
+
+TEST_MODE = False if os.getenv("DRUCKER_TEST_MODE", None) is None else True
+
+SETTINGS_YAML = os.getenv("DRUCKER_SETTINGS_YAML", "settings.yml")
+config = yaml.safe_load(open(SETTINGS_YAML, 'r'))  # safe_load: a settings file must not be able to construct arbitrary Python objects (yaml.load without a Loader is unsafe)
+
+SERVICE_PORT = os.getenv("DRUCKER_SERVICE_PORT", config.get("app.port", "5000"))  # NOTE(review): env var yields str, yaml yields int — confirm consumers handle both
+
+APPLICATION_NAME = os.getenv("DRUCKER_APPLICATION_NAME", config["app.name"])
+SERVICE_NAME = os.getenv("DRUCKER_SERVICE_NAME", config["app.service.name"])
+SERVICE_LEVEL = os.getenv("DRUCKER_SERVICE_LEVEL", config["app.service.level"])
+SERVICE_LEVEL_ENUM = ServiceEnvType.to_Enum(SERVICE_LEVEL)
+SERVICE_INFRA = os.getenv("DRUCKER_SERVICE_INFRA", "default")
+
+DIR_MODEL = os.getenv("DRUCKER_SERVICE_MODEL_DIR", config.get("app.modeldir", "./model"))
+FILE_MODEL = os.getenv("DRUCKER_SERVICE_MODEL_FILE", config.get("app.modelfile", "default.model"))
+
+DB_MODE = os.getenv('DRUCKER_DB_MODE', config.get('use.db', "sqlite"))
+DB_MYSQL_HOST = os.getenv('DRUCKER_DB_MYSQL_HOST', config.get('db.mysql.host', ""))
+DB_MYSQL_PORT = os.getenv('DRUCKER_DB_MYSQL_PORT', config.get('db.mysql.port', ""))
+DB_MYSQL_DBNAME = os.getenv('DRUCKER_DB_MYSQL_DBNAME', config.get('db.mysql.dbname', ""))
+DB_MYSQL_USER = os.getenv('DRUCKER_DB_MYSQL_USER', config.get('db.mysql.user', ""))
+DB_MYSQL_PASSWORD = os.getenv('DRUCKER_DB_MYSQL_PASSWORD', config.get('db.mysql.password', ""))