TaoLv commented on a change in pull request #15621: MKL-DNN LBR-GRU Inference Integration (FP32 LBR-GRU)
URL: https://github.com/apache/incubator-mxnet/pull/15621#discussion_r309960896
##########
File path: src/operator/nn/mkldnn/mkldnn_rnn_impl.h
##########
@@ -225,55 +241,66 @@ static void MKLDNNRNNForwardSingleLayerBi(bool state_outputs,
 mkldnn::memory::dims weights_layer_r_tz = {1, 1, I, ngates, H};  // ldigo for reorder
mkldnn::memory::dims weights_iter_tz = {1, 2, H, ngates, H}; // ldigo
 mkldnn::memory::dims weights_iter_r_tz = {1, 1, H, ngates, H};  // ldigo for reorder
- mkldnn::memory::dims bias_tz = {1, 2, ngates, H};
+ mkldnn::memory::dims bias_tz = {1, 2, nbias, H}; // ldgo
mkldnn::memory::dims src_iter_tz = {1, 2, nstates, N, H}; // ldsnc
mkldnn::memory::dims dst_iter_tz = {1, 2, nstates, N, H}; // ldsnc
- if (!initialized) {
+ bool has_adjusted = false;
+ if (!initialized || is_train) {
if (mode == rnn_enum::kGru) {
AdjustGruWeightGateOrder(wx, I, H);
AdjustGruWeightGateOrder(back_wx, I, H);
AdjustGruWeightGateOrder(wh, H, H);
AdjustGruWeightGateOrder(back_wh, H, H);
- AdjustGruBiasGateOrder(bx, H);
- AdjustGruBiasGateOrder(back_bx, H);
- AdjustGruBiasGateOrder(bh, H);
- AdjustGruBiasGateOrder(back_bh, H);
+ has_adjusted = true;
}
- auto src_wx = (*concat_weight_memory)[2 * layer_index];
- auto src_wh = (*concat_weight_memory)[2 * layer_index + 1];
+ auto src_wx = mkldnn_mems->concat_weight_memory[2 * layer_index];
+ auto src_wh = mkldnn_mems->concat_weight_memory[2 * layer_index + 1];
Review comment:
Does `auto src_wx = mkldnn_mems->concat_weight_memory[...]` make a copy of the `mkldnn::memory` handle, or is a reference intended here? If only read access is needed, consider `const auto&` (or `auto&` if it must be mutated in place) to make the ownership/copy semantics explicit and avoid an unnecessary handle copy.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services