diff --git a/model/api_calling.ipynb b/model/api_calling.ipynb new file mode 100644 index 0000000..a85dd98 --- /dev/null +++ b/model/api_calling.ipynb @@ -0,0 +1,158 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%load_ext autoreload\n", + "%autoreload 2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "PROJECT_NAME = \"experiment-week\"\n", + "MODEL_NAME = \"garbage_model_h5_128_custom_prep\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Data prep" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "images_test = np.load('images.npy')\n", + "images_test.shape" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Local API" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Make sure serialized version of your model with name `model.h5` is in the specified path: `models/{MODEL_NAME}`" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from gcp_custom_predictor import MyPredictor" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "my_pred = MyPredictor.from_path(f'models/{MODEL_NAME}')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## GCP API" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Calling `GCP Custom prediction routines` (includes preprocessing step):
\n", + "https://cloud.google.com/ai-platform/prediction/docs/custom-prediction-routines " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import gcp_api\n", + "from gcp_api import predict_json" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def predict_instances(instances, n=-1, return_imax=True):\n", + " for row in instances[:n]:\n", + " prediction = predict_json(PROJECT_NAME, MODEL_NAME, [row.tolist()])[0]\n", + " yield np.argmax(prediction) if return_imax else prediction" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "list(predict_instances(images_test, n=6))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "list(predict_instances(images_test, n=1, return_imax=False))" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/model/gcp_api.py b/model/gcp_api.py new file mode 100644 index 0000000..37e36de --- /dev/null +++ b/model/gcp_api.py @@ -0,0 +1,41 @@ +import googleapiclient.discovery +from google.oauth2 import service_account + +# authentication +SA_FILE = 'gcp_secret.json' +CREDENTIALS = service_account.Credentials.from_service_account_file(SA_FILE) +SERVICE = googleapiclient.discovery.build('ml', 'v1', credentials=CREDENTIALS) + + +def predict_json(project, model, instances, service=SERVICE, version=None): + """Send json data to a deployed model for prediction. 
def predict_json(project, model, instances, service=SERVICE, version=None):
    """Request online predictions for *instances* from a deployed model.

    Args:
        project (str): GCP project where the AI Platform model is deployed.
        model (str): Name of the deployed model.
        instances ([Mapping[str: Any]]): Keys should be the names of Tensors
            your deployed model expects as inputs. Values should be datatypes
            convertible to Tensors, or (potentially nested) lists of datatypes
            convertible to tensors.
        service: Authenticated ``ml`` API service object (defaults to the
            module-level service built at import time).
        version: str, specific model version to target; the model's default
            version is used when omitted.

    Returns:
        Mapping[str: any]: dictionary of prediction results defined by the
        model.

    Raises:
        RuntimeError: If the API response carries an ``error`` field.
    """
    # Resource name: projects/<project>/models/<model>[/versions/<version>]
    target = f'projects/{project}/models/{model}'
    if version is not None:
        target = f'{target}/versions/{version}'

    request = service.projects().predict(
        name=target,
        body={'instances': instances},
    )
    response = request.execute()

    if 'error' in response:
        raise RuntimeError(response['error'])

    return response['predictions']
class MyPredictor(object):
    """An example Predictor for an AI Platform custom prediction routine."""

    def __init__(self, model):
        """Store the trained Keras model used for prediction.

        Instances are expected to be created via ``from_path``.
        """
        self._model = model

    def predict(self, instances, **kwargs):
        """Preprocess *instances* and run inference with the Keras model.

        Applies ResNet50 input preprocessing before calling the model.

        Args:
            instances: A list of prediction input instances (convertible
                to a numpy array).
            **kwargs: Additional fields from the predict request body;
                currently unused.

        Returns:
            A list of outputs containing the prediction results.
        """
        batch = keras.applications.resnet50.preprocess_input(
            np.asarray(instances))
        return self._model.predict(batch).tolist()

    @classmethod
    def from_path(cls, model_dir):
        """Create a ``MyPredictor`` from artifacts in *model_dir*.

        The directory contains files copied from your Cloud Storage model
        directory at deploy time; the serialized Keras model must be named
        ``model.h5``.

        Args:
            model_dir: The local directory that contains the trained
                Keras model.

        Returns:
            An instance of ``MyPredictor``.
        """
        serialized = os.path.join(model_dir, 'model.h5')
        return cls(keras.models.load_model(serialized))