This repository has been archived by the owner on Oct 13, 2021. It is now read-only.

Commit

Deprecate python 3.5 CI (update to 3.6) (#686)
jiafatom authored Jan 25, 2021
1 parent 78d58b5 commit 4d7e526
Showing 7 changed files with 83 additions and 20 deletions.
6 changes: 3 additions & 3 deletions .azure-pipelines/linux-CI-keras-applications-nightly.yml
@@ -14,8 +14,8 @@ jobs:
     vmImage: 'Ubuntu-16.04'
   strategy:
     matrix:
-      Python35:
-        python.version: '3.5'
+      Python36-onnx1.2:
+        python.version: '3.6'
         ONNX_PATH: onnx==1.2.3
         INSTALL_KERAS: pip install keras==2.1.6
         UNINSTALL_KERAS:
@@ -25,7 +25,7 @@ jobs:
       INSTALL_TRANSFORMERS:
       NIGHTLY_BUILD_TEST: python run_all.py --exclude "test_keras_applications_v2.py"

-      Python36:
+      Python36-onnx1.5:
         python.version: '3.6'
         ONNX_PATH: onnx==1.5.0
         INSTALL_KERAS: pip install keras==2.2.4
6 changes: 3 additions & 3 deletions .azure-pipelines/linux-conda-CI-tf-keras.yml
@@ -14,13 +14,13 @@ jobs:
   strategy:
     matrix:
       # No python 2.x since tf2onnx does not support it
-      Python35:
-        python.version: '3.5'
+      Python36-onnx1.2:
+        python.version: '3.6'
         ONNX_PATH: onnx==1.2.3
         TENSORFLOW_PATH: tensorflow==1.11.0
         INSTALL_ORT: pip install onnxruntime==1.1.1

-      Python36:
+      Python36-onnx1.5:
         python.version: '3.6'
         ONNX_PATH: onnx==1.5.0
         TENSORFLOW_PATH: tensorflow==1.15.0
4 changes: 2 additions & 2 deletions .azure-pipelines/linux-conda-CI.yml
@@ -13,8 +13,8 @@ jobs:
     vmImage: 'Ubuntu-16.04'
   strategy:
     matrix:
-      Python35-tf1110:
-        python.version: '3.5'
+      Python36-tf1110:
+        python.version: '3.6'
         ONNX_PATH: onnx==1.2.3
         KERAS: keras==2.1.6
         TENSORFLOW_PATH: tensorflow==1.11.0
6 changes: 3 additions & 3 deletions .azure-pipelines/win32-CI-keras-applications-nightly.yml
@@ -15,8 +15,8 @@ jobs:
   strategy:
     matrix:
       # No python 2.x since no available ONNX package for Windows
-      Python35:
-        python.version: '3.5'
+      Python36-onnx1.2:
+        python.version: '3.6'
         ONNX_PATH: onnx==1.2.3
         INSTALL_KERAS: pip install keras==2.1.6
         UNINSTALL_KERAS:
@@ -26,7 +26,7 @@ jobs:
       INSTALL_TRANSFORMERS:
      NIGHTLY_BUILD_TEST: python run_all.py --exclude "test_keras_applications_v2.py test_mask_rcnn.py"

-      Python36:
+      Python36-onnx1.5:
         python.version: '3.6'
         ONNX_PATH: onnx==1.5.0
         INSTALL_KERAS: pip install keras==2.2.4
6 changes: 3 additions & 3 deletions .azure-pipelines/win32-conda-CI-tf-keras.yml
@@ -14,13 +14,13 @@ jobs:
   strategy:
     matrix:
       # No python 2.x since no available ONNX package for Windows
-      Python35:
-        python.version: '3.5'
+      Python36-onnx1.2:
+        python.version: '3.6'
         ONNX_PATH: onnx==1.2.3
         TENSORFLOW_PATH: tensorflow==1.11.0
         INSTALL_ORT: pip install onnxruntime==1.1.1

-      Python36:
+      Python36-onnx1.5:
         python.version: '3.6'
         ONNX_PATH: onnx==1.5.0
         TENSORFLOW_PATH: tensorflow==1.14.0
4 changes: 2 additions & 2 deletions .azure-pipelines/win32-conda-CI.yml
@@ -14,8 +14,8 @@ jobs:
   strategy:
     matrix:
       # No python 2.x since no available ONNX package for Windows
-      Python35-tf1110:
-        python.version: '3.5'
+      Python36-tf1110:
+        python.version: '3.6'
         ONNX_PATH: onnx==1.2.3
         KERAS: keras==2.1.6
         TENSORFLOW_PATH: tensorflow==1.11.0
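
For orientation: every matrix entry above renames the Python 3.5 leg to a Python 3.6 one, so 3.6 becomes the oldest interpreter the CI exercises. Below is a minimal, purely illustrative Python guard (not part of this commit) that a local test run could use to mirror the new floor.

import sys

# Illustrative only: mirror the CI floor (Python 3.6) when running the test suite locally.
MIN_PYTHON = (3, 6)
if sys.version_info < MIN_PYTHON:
    raise RuntimeError(
        "CI no longer covers Python %d.%d; use Python %d.%d or newer."
        % (sys.version_info.major, sys.version_info.minor, *MIN_PYTHON)
    )
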
71 changes: 67 additions & 4 deletions tests/test_utils.py
@@ -9,6 +9,7 @@
 from keras2onnx.proto.tfcompat import is_tf2
 from keras2onnx.common.onnx_ops import apply_identity, OnnxOperatorBuilder
 import time
+import json

 working_path = os.path.abspath(os.path.dirname(__file__))
 tmp_path = os.path.join(working_path, 'temp')
@@ -111,14 +112,66 @@ def print_mismatches(case_name, list_idx, expected_list, actual_list, rtol=1.e-3
               file=sys.stderr)


-def run_onnx_runtime(case_name, onnx_model, data, expected, model_files, rtol=1.e-3, atol=1.e-6, compare_perf=False):
+def load_profile_json(profile_file):
+    print(f"loading profile output {profile_file} ...")
+
+    with open(profile_file, "r") as f:
+        sess_time = json.load(f)
+
+    assert isinstance(sess_time, list)
+    return sess_time
+
+
+def parse_profile_results(sess_time, kernel_time_only=False, threshold=0):
+    node_time = {}
+    node_provider = {}
+    total = 0
+    for item in sess_time:
+        if item["cat"] == "Node" and "dur" in item and "args" in item and "op_name" in item["args"]:
+            if "provider" in item["args"]:
+                device = "CPU" if item["args"]["provider"] == "CPUExecutionProvider" else "CUDA"
+                if item["name"] not in node_provider:
+                    node_provider[item["name"]] = device
+                else:
+                    assert node_provider[item["name"]] == device
+            elif kernel_time_only:
+                continue
+
+            if item["name"] in node_time:
+                node_time[item["name"]] += item["dur"]
+            else:
+                node_time[item["name"]] = item["dur"]
+            total += item["dur"]
+
+    results = []
+    if (threshold > 0):
+        results.append(f"Threshold of Percentage > {threshold:.2f}%")
+
+    results.append(f"Duration\tPercentage\tProvider\tName")
+    for k, v in sorted(node_time.items(), key=lambda x: x[1], reverse=True):
+        provider = node_provider[k] if k in node_provider else ""
+        ratio = v / total
+        if ratio > threshold:
+            results.append(f"{v}\t{ratio * 100.0:5.2f}\t{provider}\t{k}")
+
+    return results
+
+
+def run_onnx_runtime(case_name, onnx_model, data, expected, model_files, rtol=1.e-3, atol=1.e-6,
+                     compare_perf=False, enable_profiling=False):
     if not os.path.exists(tmp_path):
         os.mkdir(tmp_path)
     temp_model_file = os.path.join(tmp_path, 'temp_' + case_name + '.onnx')
     onnx.save_model(onnx_model, temp_model_file)
     try:
         import onnxruntime
-        sess = onnxruntime.InferenceSession(temp_model_file)
+        if enable_profiling:
+            from onnxruntime import SessionOptions
+            sess_options = SessionOptions()
+            sess_options.enable_profiling = True
+            sess = onnxruntime.InferenceSession(temp_model_file, sess_options)
+        else:
+            sess = onnxruntime.InferenceSession(temp_model_file)
     except ImportError:
         keras2onnx.common.k2o_logger().warning("Cannot import ONNXRuntime!")
         return True
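
The two helpers added above consume the Chrome-trace-style JSON that onnxruntime writes when session profiling is enabled: each record is a dict with a category ("cat"), an event name ("name"), a duration ("dur"), and an "args" payload carrying the operator name and execution provider. A small self-contained sketch of the same per-node aggregation, using hand-written sample records (the records below are invented for illustration; real ones come from load_profile_json(sess.end_profiling())):

# Invented sample records shaped like onnxruntime's profiling trace.
sample_records = [
    {"cat": "Node", "name": "conv1", "dur": 420,
     "args": {"op_name": "Conv", "provider": "CPUExecutionProvider"}},
    {"cat": "Node", "name": "relu1", "dur": 35,
     "args": {"op_name": "Relu", "provider": "CPUExecutionProvider"}},
    {"cat": "Session", "name": "model_run", "dur": 500, "args": {}},  # not a Node event, skipped
]

# Same idea as parse_profile_results: sum durations per node and report each node's share.
node_time = {}
total = 0
for item in sample_records:
    if item["cat"] == "Node" and "dur" in item and "op_name" in item.get("args", {}):
        node_time[item["name"]] = node_time.get(item["name"], 0) + item["dur"]
        total += item["dur"]

print("Duration\tPercentage\tName")
for name, dur in sorted(node_time.items(), key=lambda kv: kv[1], reverse=True):
    print(f"{dur}\t{dur / total * 100.0:5.2f}\t{name}")
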
@@ -141,6 +194,15 @@ def run_onnx_runtime(case_name, onnx_model, data, expected, model_files, rtol=1.
         time_end = time.time()
         print('avg ort time =' + str((time_end - time_start)/count))

+    if enable_profiling:
+        profile_file = sess.end_profiling()
+        profile_records = load_profile_json(profile_file)
+        lines = parse_profile_results(profile_records)
+        print("Results:")
+        print("-" * 64)
+        for line in lines:
+            print(line)
+
     if expected is None:
         return
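
The branch above relies on onnxruntime's session-profiling API: setting enable_profiling on SessionOptions makes the session write a JSON trace, and end_profiling() returns the path of that file. A standalone sketch of the round trip follows; the model path and the float32 dummy input are assumptions for illustration only.

import numpy as np
import onnxruntime

model_path = "temp_model.onnx"  # placeholder; in test_utils.py this is the saved temp ONNX file

sess_options = onnxruntime.SessionOptions()
sess_options.enable_profiling = True          # emit a JSON trace for this session
sess = onnxruntime.InferenceSession(model_path, sess_options)

inp = sess.get_inputs()[0]
shape = [d if isinstance(d, int) else 1 for d in inp.shape]  # use 1 for dynamic dimensions
sess.run(None, {inp.name: np.zeros(shape, dtype=np.float32)})  # assumes a single float32 input

profile_file = sess.end_profiling()           # path of the JSON trace on disk
print("profile written to", profile_file)
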

@@ -163,7 +225,8 @@ def run_onnx_runtime(case_name, onnx_model, data, expected, model_files, rtol=1.
     return res


-def run_keras_and_ort(case_name, onnx_model, keras_model, data, expected, model_files, rtol=1.e-3, atol=1.e-6, compare_perf=False):
+def run_keras_and_ort(case_name, onnx_model, keras_model, data, expected, model_files, rtol=1.e-3, atol=1.e-6,
+                      compare_perf=False, enable_profiling=False):
     if compare_perf:
         count = 10
         time_start = time.time()
@@ -172,7 +235,7 @@ def run_keras_and_ort(case_name, onnx_model, keras_model, data, expected, model_
         time_end = time.time()
         print('avg keras time =' + str((time_end - time_start) / count))
     return run_onnx_runtime(case_name, onnx_model, data, expected, model_files,
-                            rtol=rtol, atol=atol, compare_perf=compare_perf)
+                            rtol=rtol, atol=atol, compare_perf=compare_perf, enable_profiling=enable_profiling)


def run_image(model, model_files, img_path, model_name='onnx_conversion', rtol=1.e-3, atol=1.e-5, color_mode="rgb",
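
Taken together, a test can now opt into profiling through the updated helpers. A hedged usage sketch, assuming keras2onnx plus a compatible TensorFlow/Keras are installed and that it runs from the tests directory; the tiny model and case name are invented for illustration.

import numpy as np
import keras2onnx
from tensorflow import keras
from test_utils import run_keras_and_ort

# Throwaway model, invented for this example.
model = keras.Sequential([
    keras.layers.Dense(4, activation="relu", input_shape=(8,)),
    keras.layers.Dense(2),
])
onnx_model = keras2onnx.convert_keras(model, model.name)

data = np.random.rand(3, 8).astype(np.float32)
expected = model.predict(data)

# compare_perf prints average Keras/ORT latencies; enable_profiling also dumps the ORT node profile.
run_keras_and_ort("profiling_demo", onnx_model, model, data, expected, model_files=[],
                  compare_perf=True, enable_profiling=True)
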
