mseth10 commented on a change in pull request #15886: Graph Partition API
URL: https://github.com/apache/incubator-mxnet/pull/15886#discussion_r317239988
##########
File path: tests/python/unittest/test_subgraph_op.py
##########
@@ -146,11 +146,135 @@ def get_executor(sym, subgraph_backend=None,
op_names=None, original_exec=None):
for i in range(len(outputs1)):
assert_almost_equal((outputs1[i] -
outputs2[i]).abs().sum().asnumpy(), np.zeros(shape=(1,)))
+ def _check_subgraph_exe5(sym, subgraph_backend, op_names):
+ """Call optimize_for to trigger graph partitioning without infer
shapes/types before,
+ then simple_bind and compare results of the partitioned sym and the
original sym."""
+ # simple_bind
+ exe1 = sym.simple_bind(ctx=mx.current_context(), grad_req='null')
+ input_names = sym.list_inputs()
+ for name in input_names:
+ if name in exe1.arg_dict:
+ exe1.arg_dict[name][:] =
mx.nd.random.uniform(shape=exe1.arg_dict[name].shape)
+ else:
+ assert name in exe1.aux_dict
+ exe1.aux_dict[name][:] =
mx.nd.random.uniform(shape=exe1.aux_dict[name].shape)
+ exe1.forward()
+
+ # partition before simple_bind
+ check_call(_LIB.MXSetSubgraphPropertyOpNames(c_str(subgraph_backend),
mx_uint(len(op_names)),
+ c_str_array(op_names)))
+ part_sym = sym.optimize_for(subgraph_backend)
+
check_call(_LIB.MXRemoveSubgraphPropertyOpNames(c_str(subgraph_backend)))
+ exe2 = part_sym.simple_bind(ctx=mx.current_context(), grad_req='null')
+ for name in input_names:
Review comment:
The loop assigns random inputs to the executor generated using simple_bind. Both
executors share the same set of inputs. Refactored the code and added comments.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services