From 769fa3ea037a7cc7e3a753fd9338ad961cf07910 Mon Sep 17 00:00:00 2001
From: Wenhao Hu
Date: Thu, 4 Oct 2018 16:40:03 +0900
Subject: [PATCH 1/5] reuse sess in backend

---
 onnx_tf/backend_rep.py | 49 +++++++++++++++++++++++++------------------------
 1 file changed, 25 insertions(+), 24 deletions(-)

diff --git a/onnx_tf/backend_rep.py b/onnx_tf/backend_rep.py
index 0236270e8..dc1dbda6d 100644
--- a/onnx_tf/backend_rep.py
+++ b/onnx_tf/backend_rep.py
@@ -15,6 +15,7 @@ def __init__(self, graph=None, inputs=None, outputs=None, tensor_dict=None):
     self._graph = graph
     self._inputs = inputs or []
     self._outputs = outputs or []
+    self._sess = None
     self._tensor_dict = tensor_dict or {}
 
   @property
@@ -60,30 +61,30 @@ def run(self, inputs, **kwargs):
 
     # TODO: handle name scope if necessary
     with self.graph.as_default():
-      with tf.Session() as sess:
-        if isinstance(inputs, dict):
-          feed_dict = inputs
-        elif isinstance(inputs, list) or isinstance(inputs, tuple):
-          if len(self.inputs) != len(inputs):
-            raise RuntimeError('Expected {} values for uninitialized '
-                               'graph inputs ({}), but got {}.'.format(
-                                   len(self.inputs), ', '.join(self.inputs),
-                                   len(inputs)))
-          feed_dict = dict(zip(self.inputs, inputs))
-        else:
-          # single input
-          feed_dict = dict([(self.inputs[0], inputs)])
-
-        feed_dict = {
-            self.tensor_dict[key]: feed_dict[key]
-            for key in self.inputs
-        }
-
-        sess.run(tf.global_variables_initializer())
-        outputs = [self.tensor_dict[output] for output in self.outputs]
-
-        output_values = sess.run(outputs, feed_dict=feed_dict)
-        return namedtupledict('Outputs', self.outputs)(*output_values)
+      sess = self._sess or tf.Session()
+      self._sess = sess
+
+      if isinstance(inputs, dict):
+        feed_dict = inputs
+      elif isinstance(inputs, list) or isinstance(inputs, tuple):
+        if len(self.inputs) != len(inputs):
+          raise RuntimeError('Expected {} values for uninitialized '
+                             'graph inputs ({}), but got {}.'.format(
+                                 len(self.inputs), ', '.join(self.inputs),
+                                 len(inputs)))
+        feed_dict = dict(zip(self.inputs, inputs))
+      else:
+        # single input
+        feed_dict = dict([(self.inputs[0], inputs)])
+
+      feed_dict = {self.tensor_dict[key]: feed_dict[key] for key in self.inputs}
+
+      sess.run(tf.global_variables_initializer())
+      outputs = [self.tensor_dict[output] for output in self.outputs]
+
+      output_values = sess.run(outputs, feed_dict=feed_dict)
+
+      return namedtupledict('Outputs', self.outputs)(*output_values)
 
   def export_graph(self, path):
     """Export backend representation to a Tensorflow proto file.

From ac552c056a5b16bc00d63a00ffd8b05f9fb7c68d Mon Sep 17 00:00:00 2001
From: Wenhao Hu
Date: Fri, 5 Oct 2018 09:43:57 +0900
Subject: [PATCH 2/5] add sess arg to run and add create_session function

---
 onnx_tf/backend_rep.py | 17 +++++++++++++----
 1 file changed, 13 insertions(+), 4 deletions(-)

diff --git a/onnx_tf/backend_rep.py b/onnx_tf/backend_rep.py
index dc1dbda6d..f03fd7d32 100644
--- a/onnx_tf/backend_rep.py
+++ b/onnx_tf/backend_rep.py
@@ -15,7 +15,6 @@ def __init__(self, graph=None, inputs=None, outputs=None, tensor_dict=None):
     self._graph = graph
     self._inputs = inputs or []
     self._outputs = outputs or []
-    self._sess = None
     self._tensor_dict = tensor_dict or {}
 
   @property
@@ -50,10 +49,12 @@ def tensor_dict(self):
   def tensor_dict(self, tensor_dict):
     self._tensor_dict = tensor_dict
 
-  def run(self, inputs, **kwargs):
+  def run(self, inputs, sess=None, **kwargs):
     """ Run TensorflowRep.
 
     :param inputs: Given inputs.
+    :param sess: tf.Session. The environment in which Operation objects are executed,
+      and Tensor objects are evaluated.
     :param kwargs: Other args.
     :return: Outputs.
     """
@@ -61,8 +62,7 @@ def run(self, inputs, **kwargs):
 
     # TODO: handle name scope if necessary
     with self.graph.as_default():
-      sess = self._sess or tf.Session()
-      self._sess = sess
+      sess = sess or tf.Session()
 
       if isinstance(inputs, dict):
         feed_dict = inputs
@@ -101,3 +101,12 @@ def export_graph(self, path):
     file = open(path, "wb")
     file.write(graph_proto.SerializeToString())
     file.close()
+
+  def create_session(self):
+    """ Create a tf.Session object using the current graph.
+    Passing it to `run` can reduce the initialization overhead
+    when doing inference consecutively.
+
+    :returns: A Session object.
+    """
+    return tf.Session(self.graph)

From a9c6de6b0a6de52208ff56dc5de2b738dead8655 Mon Sep 17 00:00:00 2001
From: Wenhao Hu
Date: Sat, 6 Oct 2018 10:03:57 +0900
Subject: [PATCH 3/5] bug fix

---
 onnx_tf/backend_rep.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/onnx_tf/backend_rep.py b/onnx_tf/backend_rep.py
index f03fd7d32..943a218ec 100644
--- a/onnx_tf/backend_rep.py
+++ b/onnx_tf/backend_rep.py
@@ -109,4 +109,4 @@ def create_session(self):
 
     :returns: A Session object.
     """
-    return tf.Session(self.graph)
+    return tf.Session(graph=self.graph)

From 42e8be25ed7ab63a54c25384b9bb7c364bdca39d Mon Sep 17 00:00:00 2001
From: Wenhao Hu
Date: Tue, 9 Oct 2018 11:19:16 +0900
Subject: [PATCH 4/5] bug fix

---
 onnx_tf/backend_rep.py | 45 ++++++++++++++++++++++-----------------------
 1 file changed, 22 insertions(+), 23 deletions(-)

diff --git a/onnx_tf/backend_rep.py b/onnx_tf/backend_rep.py
index 943a218ec..0be0d592b 100644
--- a/onnx_tf/backend_rep.py
+++ b/onnx_tf/backend_rep.py
@@ -62,29 +62,28 @@ def run(self, inputs, sess=None, **kwargs):
 
     # TODO: handle name scope if necessary
     with self.graph.as_default():
-      sess = sess or tf.Session()
-
-      if isinstance(inputs, dict):
-        feed_dict = inputs
-      elif isinstance(inputs, list) or isinstance(inputs, tuple):
-        if len(self.inputs) != len(inputs):
-          raise RuntimeError('Expected {} values for uninitialized '
-                             'graph inputs ({}), but got {}.'.format(
-                                 len(self.inputs), ', '.join(self.inputs),
-                                 len(inputs)))
-        feed_dict = dict(zip(self.inputs, inputs))
-      else:
-        # single input
-        feed_dict = dict([(self.inputs[0], inputs)])
-
-      feed_dict = {self.tensor_dict[key]: feed_dict[key] for key in self.inputs}
-
-      sess.run(tf.global_variables_initializer())
-      outputs = [self.tensor_dict[output] for output in self.outputs]
-
-      output_values = sess.run(outputs, feed_dict=feed_dict)
-
-      return namedtupledict('Outputs', self.outputs)(*output_values)
+      with sess or tf.Session() as sess:
+        if isinstance(inputs, dict):
+          feed_dict = inputs
+        elif isinstance(inputs, list) or isinstance(inputs, tuple):
+          if len(self.inputs) != len(inputs):
+            raise RuntimeError('Expected {} values for uninitialized '
+                               'graph inputs ({}), but got {}.'.format(
+                                   len(self.inputs), ', '.join(self.inputs),
+                                   len(inputs)))
+          feed_dict = dict(zip(self.inputs, inputs))
+        else:
+          # single input
+          feed_dict = dict([(self.inputs[0], inputs)])
+
+        feed_dict = {self.tensor_dict[key]: feed_dict[key] for key in self.inputs}
+
+        sess.run(tf.global_variables_initializer())
+        outputs = [self.tensor_dict[output] for output in self.outputs]
+
+        output_values = sess.run(outputs, feed_dict=feed_dict)
+
+        return namedtupledict('Outputs', self.outputs)(*output_values)
 
   def export_graph(self, path):
     """Export backend representation to a Tensorflow proto file.
From 547c16823a14d472ae570053e387ae1ee7178c74 Mon Sep 17 00:00:00 2001
From: Wenhao Hu
Date: Tue, 9 Oct 2018 11:47:38 +0900
Subject: [PATCH 5/5] bug fix

---
 onnx_tf/backend_rep.py | 46 ++++++++++++++++++++++++----------------------
 1 file changed, 24 insertions(+), 22 deletions(-)

diff --git a/onnx_tf/backend_rep.py b/onnx_tf/backend_rep.py
index 0be0d592b..71ea39064 100644
--- a/onnx_tf/backend_rep.py
+++ b/onnx_tf/backend_rep.py
@@ -60,30 +60,32 @@ def run(self, inputs, sess=None, **kwargs):
     """
     super(TensorflowRep, self).run(inputs, **kwargs)
 
+    should_close_sess = sess is None
     # TODO: handle name scope if necessary
     with self.graph.as_default():
-      with sess or tf.Session() as sess:
-        if isinstance(inputs, dict):
-          feed_dict = inputs
-        elif isinstance(inputs, list) or isinstance(inputs, tuple):
-          if len(self.inputs) != len(inputs):
-            raise RuntimeError('Expected {} values for uninitialized '
-                               'graph inputs ({}), but got {}.'.format(
-                                   len(self.inputs), ', '.join(self.inputs),
-                                   len(inputs)))
-          feed_dict = dict(zip(self.inputs, inputs))
-        else:
-          # single input
-          feed_dict = dict([(self.inputs[0], inputs)])
-
-        feed_dict = {self.tensor_dict[key]: feed_dict[key] for key in self.inputs}
-
-        sess.run(tf.global_variables_initializer())
-        outputs = [self.tensor_dict[output] for output in self.outputs]
-
-        output_values = sess.run(outputs, feed_dict=feed_dict)
-
-        return namedtupledict('Outputs', self.outputs)(*output_values)
+      sess = sess or tf.Session()
+      if isinstance(inputs, dict):
+        feed_dict = inputs
+      elif isinstance(inputs, list) or isinstance(inputs, tuple):
+        if len(self.inputs) != len(inputs):
+          raise RuntimeError('Expected {} values for uninitialized '
+                             'graph inputs ({}), but got {}.'.format(
+                                 len(self.inputs), ', '.join(self.inputs),
+                                 len(inputs)))
+        feed_dict = dict(zip(self.inputs, inputs))
+      else:
+        # single input
+        feed_dict = dict([(self.inputs[0], inputs)])
+
+      feed_dict = {self.tensor_dict[key]: feed_dict[key] for key in self.inputs}
+
+      sess.run(tf.global_variables_initializer())
+      outputs = [self.tensor_dict[output] for output in self.outputs]
+
+      output_values = sess.run(outputs, feed_dict=feed_dict)
+      if should_close_sess:
+        sess.close()
+      return namedtupledict('Outputs', self.outputs)(*output_values)
 
   def export_graph(self, path):
     """Export backend representation to a Tensorflow proto file.
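
With the series applied, `TensorflowRep.run` accepts an optional `sess` argument and `create_session` returns a session bound to the backend graph, so one session can be created up front and reused across consecutive inference calls; `run` only closes a session it created itself. Below is a minimal usage sketch, assuming TensorFlow 1.x and the existing `onnx_tf.backend.prepare` API; the model path "model.onnx" and the input shape are placeholders for illustration only.

  import numpy as np
  import onnx
  from onnx_tf.backend import prepare

  # "model.onnx" is a placeholder path; the input shape below must match
  # whatever single-input model is actually loaded.
  model = onnx.load("model.onnx")
  tf_rep = prepare(model)  # returns a TensorflowRep

  # Create the session once (patches 2-3) and reuse it for every call;
  # run() skips closing sessions it did not create (patch 5).
  sess = tf_rep.create_session()
  try:
    for _ in range(10):
      x = np.random.rand(1, 3, 224, 224).astype(np.float32)  # placeholder input
      outputs = tf_rep.run(x, sess=sess)
      print(outputs[0].shape)
  finally:
    sess.close()  # the caller owns any session it passed in

Calling `run` without `sess` keeps the previous per-call behavior: a fresh session is created, used, and closed inside `run`.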