josephevans commented on a change in pull request #19697:
URL: https://github.com/apache/incubator-mxnet/pull/19697#discussion_r548194878
##########
File path: tests/python-pytest/onnx/test_onnxruntime.py
##########
@@ -97,25 +102,79 @@ def download_test_images(tmpdir):
tmp_path = str(tmp_path)
- #labels = load_imgnet_labels(tmp_path)
- test_images = download_test_images(tmp_path)
- sym_file, params_file = get_gluon_cv_model(model, tmp_path)
- onnx_file = export_model_to_onnx(sym_file, params_file)
-
- # create onnxruntime session using the generated onnx file
- ses_opt = onnxruntime.SessionOptions()
- ses_opt.log_severity_level = 3
- session = onnxruntime.InferenceSession(onnx_file, ses_opt)
- input_name = session.get_inputs()[0].name
-
- for img, accepted_ids in test_images:
- img_data = normalize_image(os.path.join(tmp_path,img))
- raw_result = session.run([], {input_name: img_data})
- res = softmax(np.array(raw_result)).tolist()
- class_idx = np.argmax(res)
- assert(class_idx in accepted_ids)
-
- shutil.rmtree(tmp_path)
-
+ try:
+ #labels = load_imgnet_labels(tmp_path)
+ test_images = download_test_images(tmp_path)
+ sym_file, params_file = get_gluon_cv_model(model, tmp_path)
+ onnx_file = export_model_to_onnx(sym_file, params_file)
+
+ # create onnxruntime session using the generated onnx file
+ ses_opt = onnxruntime.SessionOptions()
+ ses_opt.log_severity_level = 3
+ session = onnxruntime.InferenceSession(onnx_file, ses_opt)
+ input_name = session.get_inputs()[0].name
+
+ for img, accepted_ids in test_images:
+ img_data = normalize_image(os.path.join(tmp_path,img))
+ raw_result = session.run([], {input_name: img_data})
+ res = softmax(np.array(raw_result)).tolist()
+ class_idx = np.argmax(res)
+ assert(class_idx in accepted_ids)
+
+ finally:
+ shutil.rmtree(tmp_path)
+
+
+@with_seed()
[email protected]('model', ['bert_12_768_12'])
+def test_bert_inference_onnxruntime(tmp_path, model):
+ tmp_path = str(tmp_path)
+ try:
+ import gluonnlp as nlp
+ dataset = 'book_corpus_wiki_en_uncased'
+ ctx = mx.cpu(0)
+ model, vocab = nlp.model.get_model(
+ name=model,
+ ctx=ctx,
+ dataset_name=dataset,
+ pretrained=False,
+ use_pooler=True,
+ use_decoder=False,
+ use_classifier=False)
+ model.initialize(ctx=ctx)
+ model.hybridize(static_alloc=True)
+
+ batch = 5
+ seq_length = 16
+ # create synthetic test data
+        inputs = mx.nd.random.uniform(0, 30522, shape=(batch, seq_length), dtype='float32')
+        token_types = mx.nd.random.uniform(0, 2, shape=(batch, seq_length), dtype='float32')
+ valid_length = mx.nd.array([seq_length] * batch, dtype='float32')
+
+ seq_encoding, cls_encoding = model(inputs, token_types, valid_length)
+
+ prefix = "%s/bert" % tmp_path
+ model.export(prefix)
+ sym_file = "%s-symbol.json" % prefix
+ params_file = "%s-0000.params" % prefix
+ onnx_file = "%s.onnx" % prefix
+
+
+ input_shapes = [(batch, seq_length), (batch, seq_length), (batch,)]
+        converted_model_path = mx.contrib.onnx.export_model(sym_file, params_file, input_shapes, np.float32, onnx_file)
+
+
+ # create onnxruntime session using the generated onnx file
+ ses_opt = onnxruntime.SessionOptions()
+ ses_opt.log_severity_level = 3
+ session = onnxruntime.InferenceSession(onnx_file, ses_opt)
+ onnx_inputs = [inputs, token_types, valid_length]
+        input_dict = dict((session.get_inputs()[i].name, onnx_inputs[i].asnumpy()) for i in range(len(onnx_inputs)))
+ pred_onx = session.run(None, input_dict)[0]
+
+ assert_almost_equal(seq_encoding, pred_onx)
Review comment:
Added!
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]