PawelGlomski-Intel commented on a change in pull request #20606:
URL: https://github.com/apache/incubator-mxnet/pull/20606#discussion_r724204583



##########
File path: src/operator/subgraph/dnnl/dnnl_bn_relu_property.h
##########
@@ -106,7 +105,7 @@ class SgMKLDNNBNReLUProperty : public SubgraphProperty {
     nnvm::ObjectPtr n = nnvm::Node::Create();
 
     std::ostringstream node_name;
-    node_name << "sg_mkldnn_batch_norm_relu_" << std::to_string(subgraph_id);
+    node_name << "sg_dnnl_batch_norm_relu_" << std::to_string(subgraph_id);

Review comment:
       ```suggestion
       node_name << "sg_onednn_batch_norm_relu_" << std::to_string(subgraph_id);
   ```

##########
File path: src/operator/subgraph/dnnl/dnnl_bn_relu_property.h
##########
@@ -91,8 +90,8 @@ class SgMKLDNNBNReLUProperty : public SubgraphProperty {
   }
 
   static SubgraphPropertyPtr Create() {
-    static const std::string& name = "MKLDNN BN + ReLU optimization pass";
-    auto property                  = std::make_shared<SgMKLDNNBNReLUProperty>();
+    static const std::string& name = "DNNL BN + ReLU optimization pass";

Review comment:
       ```suggestion
       static const std::string& name = "oneDNN BN + ReLU optimization pass";
   ```

##########
File path: python/mxnet/amp/lists/symbol_fp16.py
##########
@@ -611,10 +611,10 @@
 
 if Features().is_enabled('ONEDNN'):
     FP32_FUNCS.extend([
-        '_sg_mkldnn_conv',
-        '_sg_mkldnn_fully_connected',
-        '_sg_mkldnn_selfatt_qk',
-        '_sg_mkldnn_selfatt_valatt',
+        '_sg_dnnl_conv',
+        '_sg_dnnl_fully_connected',
+        '_sg_dnnl_selfatt_qk',
+        '_sg_dnnl_selfatt_valatt',

Review comment:
       ```suggestion
           '_sg_onednn_conv',
           '_sg_onednn_fully_connected',
           '_sg_onednn_selfatt_qk',
           '_sg_onednn_selfatt_valatt',
   ```

##########
File path: src/operator/subgraph/dnnl/dnnl_transformer.cc
##########
@@ -490,7 +489,7 @@ class MKLDNNSelfAttValAttOp {
                 const std::vector<NDArray>& inputs,
                 const std::vector<OpReqType>& req,
                 const std::vector<NDArray>& outputs) {
-    LOG(FATAL) << "Not implemented: subgraph mkldnn self attention val only supports "
+    LOG(FATAL) << "Not implemented: subgraph dnnl self attention val only supports "

Review comment:
       ```suggestion
       LOG(FATAL) << "Not implemented: subgraph oneDNN self attention val only supports "
   ```

##########
File path: src/operator/subgraph/dnnl/dnnl_post_quantize_align_scale_property.h
##########
@@ -117,13 +116,13 @@ class SgMKLDNNConcatPostQuantizeSelector : public SubgraphSelectorV2 {
   std::unordered_set<const nnvm::Node*> visit_list_;
 };
 
-class SgMKLDNNPostQuantizeAlignScaleProperty : public SubgraphProperty {
+class SgDNNLPostQuantizeAlignScaleProperty : public SubgraphProperty {
  public:
-  SgMKLDNNPostQuantizeAlignScaleProperty() : SubgraphProperty(kAdjust) {}
+  SgDNNLPostQuantizeAlignScaleProperty() : SubgraphProperty(kAdjust) {}
 
   static SubgraphPropertyPtr Create() {
-    static const std::string& name = "MKLDNN post-quantization scale alignment optimization pass";
-    auto property                  = std::make_shared<SgMKLDNNPostQuantizeAlignScaleProperty>();
+    static const std::string& name = "DNNL post-quantization scale alignment optimization pass";

Review comment:
       ```suggestion
       static const std::string& name = "oneDNN post-quantization scale alignment optimization pass";
   ```

##########
File path: src/operator/subgraph/dnnl/dnnl_subgraph_property.cc
##########
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+#if MXNET_USE_ONEDNN == 1
+
+#include "dnnl_bn_relu_property.h"
+#include "dnnl_conv_property.h"
+#include "dnnl_elemwisemul_post_quantize_property.h"
+#include "dnnl_fc_post_quantize_property.h"
+#include "dnnl_fc_property.h"
+#include "dnnl_post_quantize_align_scale_property.h"
+#include "dnnl_post_quantize_property.h"
+#include "dnnl_transformer_post_quantize_property.h"
+#include "dnnl_transformer_qk_property.h"
+#include "dnnl_transformer_valatt_property.h"
+
+namespace mxnet {
+namespace op {
+
+MXNET_REGISTER_SUBGRAPH_BACKEND(DNNL)

Review comment:
       ```suggestion
   MXNET_REGISTER_SUBGRAPH_BACKEND(ONEDNN)
   ```
   This one might be harder to change, since the registered backend name is user-facing: it is the string users pass to `optimize_for` and to `MXNET_SUBGRAPH_BACKEND`.
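
   For context, a minimal sketch of why the registered name leaks into user code. The `optimize_for` call and the `'ONEDNN'` string below are assumptions about how things would look after this rename, not code from this PR:

   ```python
   # Minimal sketch, assuming the backend gets re-registered as ONEDNN.
   # The string passed to optimize_for (or to MXNET_SUBGRAPH_BACKEND) has to match
   # the name given to MXNET_REGISTER_SUBGRAPH_BACKEND, so the rename is user-visible.
   import mxnet as mx

   data = mx.sym.Variable('data')
   fc   = mx.sym.FullyConnected(data=data, num_hidden=16, name='fc')
   net  = mx.sym.Activation(data=fc, act_type='relu', name='relu')

   # Scripts that still pass 'MKLDNN' (or 'DNNL') would stop matching the backend.
   fused = net.optimize_for('ONEDNN')
   ```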

##########
File path: src/operator/subgraph/dnnl/dnnl_transformer_qk_property.h
##########
@@ -153,22 +152,22 @@ class SgMKLDNNTransformerQKSelector : public SubgraphSelector {
 
   void Reset() override {
     CHECK_GE(matched_list_.size(), 1);
-    auto new_selector = SgMKLDNNTransformerQKSelector();
+    auto new_selector = SgDNNLTransformerQKSelector();
     new_selector.Select(*matched_list_[0], nullptr);
     *this = new_selector;
   }
 };
 
-class SgMKLDNNTransformerQKProperty : public SubgraphProperty {
+class SgDNNLTransformerQKProperty : public SubgraphProperty {
  public:
-  SgMKLDNNTransformerQKProperty() {}
+  SgDNNLTransformerQKProperty() {}
 
   static SubgraphPropertyPtr Create() {
-    static const std::string& name = "MKLDNN Transformer optimization pass";
-    auto property                  = std::make_shared<SgMKLDNNTransformerQKProperty>();
+    static const std::string& name = "DNNL Transformer optimization pass";
+    auto property                  = std::make_shared<SgDNNLTransformerQKProperty>();
     property->SetAttr<std::string>("property_name", name);
     property->SetAttr<bool>("inference_only", true);
-    if (dmlc::GetEnv("MXNET_DISABLE_MKLDNN_TRANSFORMER_OPT", 0)) {
+    if (dmlc::GetEnv("MXNET_DISABLE_DNNL_TRANSFORMER_OPT", 0)) {

Review comment:
       ```suggestion
       if (dmlc::GetEnv("MXNET_DISABLE_ONEDNN_TRANSFORMER_OPT", 0)) {
   ```
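
   As a side note, a minimal usage sketch for the renamed switch; the variable name assumes the suggestion above is applied, and the value is read through `dmlc::GetEnv` when the property is created:

   ```python
   # Minimal sketch, assuming MXNET_DISABLE_ONEDNN_TRANSFORMER_OPT is the final name.
   # The variable must be set in the process environment before the subgraph pass
   # is constructed; a non-zero value disables the transformer QK fusion.
   import os
   os.environ['MXNET_DISABLE_ONEDNN_TRANSFORMER_OPT'] = '1'

   import mxnet as mx  # graph optimization done after this point skips the QK fusion pass
   ```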

##########
File path: src/operator/subgraph/dnnl/dnnl_fc_post_quantize_property.h
##########
@@ -146,22 +145,22 @@ class SgMKLDNNFCPostQuantizeSelector : public SubgraphSelectorV2 {
 
   void Reset() override {
     CHECK_GE(matched_list.size(), 1);
-    auto new_selector = SgMKLDNNFCPostQuantizeSelector(disable_all, disable_float_output);
+    auto new_selector = SgDNNLFCPostQuantizeSelector(disable_all, disable_float_output);
     new_selector.Select(*matched_list[0]);
     *this = new_selector;
   }
 };
 
-class SgMKLDNNFCPostQuantizeProperty : public SubgraphProperty {
+class SgDNNLFCPostQuantizeProperty : public SubgraphProperty {
  public:
-  SgMKLDNNFCPostQuantizeProperty() {
+  SgDNNLFCPostQuantizeProperty() {
     disable_fuse_all     = dmlc::GetEnv("MXNET_DISABLE_ONEDNN_QFC_FUSE_ALL", false);
     disable_float_output = dmlc::GetEnv("MXNET_DISABLE_ONEDNN_QFC_FLOAT_OUTPUT", false);
   }
 
   static SubgraphPropertyPtr Create() {
-    static const std::string& name = "MKLDNN FullyConected post-quantization optimization pass";
-    auto property                  = std::make_shared<SgMKLDNNFCPostQuantizeProperty>();
+    static const std::string& name = "DNNL FullyConected post-quantization optimization pass";

Review comment:
       ```suggestion
       static const std::string& name = "oneDNN FullyConected post-quantization optimization pass";
   ```

##########
File path: src/operator/subgraph/dnnl/dnnl_post_quantize_property.h
##########
@@ -112,22 +111,22 @@ class SgMKLDNNPostQuantizeSelector : public SubgraphSelector {
 
   void Reset() override {
     CHECK_GE(matched_list.size(), 1);
-    auto new_selector = SgMKLDNNPostQuantizeSelector();
+    auto new_selector = SgDNNLPostQuantizeSelector();
     new_selector.Select(*matched_list[0]);
     *this = new_selector;
   }
 };
 
-class SgMKLDNNPostQuantizeProperty : public SubgraphProperty {
+class SgDNNLPostQuantizeProperty : public SubgraphProperty {
  public:
-  SgMKLDNNPostQuantizeProperty() {
-    support_requantize_fusion_op_name.insert("_sg_mkldnn_conv");
+  SgDNNLPostQuantizeProperty() {
+    support_requantize_fusion_op_name.insert("_sg_dnnl_conv");
     support_requantize_fusion_op_name.insert("_contrib_quantized_elemwise_add");
     support_requantize_fusion_op_name.insert("_contrib_quantized_npi_add");
   }
   static SubgraphPropertyPtr Create() {
-    static const std::string& name = "MKLDNN post-quantization optimization pass";
-    auto property                  = std::make_shared<SgMKLDNNPostQuantizeProperty>();
+    static const std::string& name = "DNNL post-quantization optimization pass";

Review comment:
       ```suggestion
       static const std::string& name = "oneDNN post-quantization optimization pass";
   ```

##########
File path: src/operator/subgraph/dnnl/dnnl_transformer.cc
##########
@@ -123,7 +122,7 @@ class SgMKLDNNSelfAttQKOp {
                 const std::vector<NDArray>& inputs,
                 const std::vector<OpReqType>& req,
                 const std::vector<NDArray>& outputs) {
-    LOG(FATAL) << "Not implemented: subgraph mkldnn self attention qk only supports "
+    LOG(FATAL) << "Not implemented: subgraph dnnl self attention qk only supports "

Review comment:
       ```suggestion
       LOG(FATAL) << "Not implemented: subgraph oneDNN self attention qk only supports "
   ```

##########
File path: src/operator/subgraph/dnnl/dnnl_elemwisemul_post_quantize_property.h
##########
@@ -161,7 +160,7 @@ class ElemwiseMulPostQuantizeProperty : public SubgraphProperty {
   }
 
   static SubgraphPropertyPtr Create() {
-    static const std::string& name = "MKLDNN EltwiseMul post-quantization optimization pass";
+    static const std::string& name = "DNNL EltwiseMul post-quantization optimization pass";

Review comment:
       ```suggestion
       static const std::string& name = "oneDNN EltwiseMul post-quantization optimization pass";
   ```

##########
File path: src/operator/subgraph/dnnl/dnnl_conv_property.h
##########
@@ -204,7 +199,7 @@ class SgMKLDNNConvProperty : public SubgraphProperty {
     nnvm::Symbol new_sym;
     new_sym.outputs.emplace_back(last_node);
     std::ostringstream node_name;
-    node_name << "sg_mkldnn_";
+    node_name << "sg_dnnl_";

Review comment:
       ```suggestion
       node_name << "sg_onednn_";
   ```

##########
File path: src/operator/subgraph/dnnl/dnnl_conv_property.h
##########
@@ -170,25 +165,25 @@ class SgMKLDNNConvSelector : public SubgraphSelector {
 
   void Reset() override {
     CHECK_GE(matched_list_.size(), 1);
-    auto new_selector = SgMKLDNNConvSelector(
+    auto new_selector = SgDNNLConvSelector(
         disable_all_, disable_conv_bn_, disable_conv_act_, disable_conv_sum_, quantize_);
     new_selector.Select(*matched_list_[0], nullptr);
     *this = new_selector;
   }
 };
 
-class SgMKLDNNConvProperty : public SubgraphProperty {
+class SgDNNLConvProperty : public SubgraphProperty {
  public:
-  SgMKLDNNConvProperty() {
+  SgDNNLConvProperty() {
     disable_conv_bn_  = dmlc::GetEnv("MXNET_DISABLE_ONEDNN_FUSE_CONV_BN", 0);
     disable_conv_act_ = dmlc::GetEnv("MXNET_DISABLE_ONEDNN_FUSE_CONV_RELU", 0);
     disable_conv_sum_ = dmlc::GetEnv("MXNET_DISABLE_ONEDNN_FUSE_CONV_SUM", 0);
 
     disable_all_ = disable_conv_bn_ && disable_conv_act_ && disable_conv_sum_;
   }
   static SubgraphPropertyPtr Create() {
-    static const std::string& name = "MKLDNN convolution optimization pass";
-    auto property                  = std::make_shared<SgMKLDNNConvProperty>();
+    static const std::string& name = "DNNL convolution optimization pass";

Review comment:
       ```suggestion
       static const std::string& name = "oneDNN convolution optimization pass";
   ```

##########
File path: python/mxnet/contrib/quantization.py
##########
@@ -527,13 +527,13 @@ def quantize_model(sym, arg_params, aux_params, data_names=('data',),
 
     return qsym, qarg_params, aux_params
 
-def quantize_model_mkldnn(sym, arg_params, aux_params, data_names=('data',),
-                          ctx=cpu(), excluded_sym_names=None, excluded_op_names=None,
-                          calib_mode='entropy', calib_data=None, num_calib_batches=None,
-                          quantized_dtype='int8', quantize_mode='smart',
-                          quantize_granularity='tensor-wise', logger=None):
+def quantize_model_dnnl(sym, arg_params, aux_params, data_names=('data',),

Review comment:
       ```suggestion
   def quantize_model_onednn(sym, arg_params, aux_params, data_names=('data',),
   ```
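
   If backward compatibility for the old Python name matters, a thin deprecation alias is one option. This is only a sketch; the alias and warning text are my assumption, not part of the PR:

   ```python
   # Minimal sketch: keep the old entry point importable while steering users to the new name.
   import warnings

   def quantize_model_mkldnn(sym, arg_params, aux_params, **kwargs):
       """Deprecated alias for quantize_model_onednn (assumed new name)."""
       warnings.warn("quantize_model_mkldnn is deprecated; use quantize_model_onednn instead.",
                     DeprecationWarning, stacklevel=2)
       return quantize_model_onednn(sym, arg_params, aux_params, **kwargs)
   ```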

##########
File path: src/operator/subgraph/dnnl/dnnl_transformer_qk_property.h
##########
@@ -153,22 +152,22 @@ class SgMKLDNNTransformerQKSelector : public SubgraphSelector {
 
   void Reset() override {
     CHECK_GE(matched_list_.size(), 1);
-    auto new_selector = SgMKLDNNTransformerQKSelector();
+    auto new_selector = SgDNNLTransformerQKSelector();
     new_selector.Select(*matched_list_[0], nullptr);
     *this = new_selector;
   }
 };
 
-class SgMKLDNNTransformerQKProperty : public SubgraphProperty {
+class SgDNNLTransformerQKProperty : public SubgraphProperty {
  public:
-  SgMKLDNNTransformerQKProperty() {}
+  SgDNNLTransformerQKProperty() {}
 
   static SubgraphPropertyPtr Create() {
-    static const std::string& name = "MKLDNN Transformer optimization pass";
-    auto property                  = std::make_shared<SgMKLDNNTransformerQKProperty>();
+    static const std::string& name = "DNNL Transformer optimization pass";

Review comment:
       ```suggestion
       static const std::string& name = "oneDNN Transformer optimization pass";
   ```

##########
File path: src/operator/subgraph/dnnl/dnnl_fc_property.h
##########
@@ -156,21 +155,21 @@ class SgMKLDNNFCSelector : public SubgraphSelector {
 
   void Reset() override {
     CHECK_GE(matched_list_.size(), 1);
-    auto new_selector = SgMKLDNNFCSelector(disable_fc_eltwise_, quantized_);
+    auto new_selector = SgDNNLFCSelector(disable_fc_eltwise_, quantized_);
     new_selector.Select(*matched_list_[0], nullptr);
     *this = new_selector;
   }
 };
 
-class SgMKLDNNFCProperty : public SubgraphProperty {
+class SgDNNLFCProperty : public SubgraphProperty {
  public:
-  SgMKLDNNFCProperty() {
+  SgDNNLFCProperty() {
     disable_fc_eltwise_ = dmlc::GetEnv("MXNET_DISABLE_ONEDNN_FUSE_FC_ELTWISE", false);
   }
 
   static SubgraphPropertyPtr Create() {
-    static const std::string& name = "MKLDNN FullyConnected optimization pass";
-    auto property                  = std::make_shared<SgMKLDNNFCProperty>();
+    static const std::string& name = "DNNL FullyConnected optimization pass";

Review comment:
       ```suggestion
       static const std::string& name = "oneDNN FullyConnected optimization pass";
   ```

##########
File path: src/operator/subgraph/dnnl/dnnl_transformer_valatt_property.h
##########
@@ -227,22 +226,22 @@ class SgMKLDNNTransformerValAttSelector : public SubgraphSelectorV2 {
 
   void Reset() override {
     CHECK_GE(matched_list_.size(), 1);
-    auto new_selector = SgMKLDNNTransformerValAttSelector();
+    auto new_selector = SgDNNLTransformerValAttSelector();
     new_selector.Select(*matched_list_[0], nullptr);
     *this = new_selector;
   }
 };
 
-class SgMKLDNNTransformerValAttProperty : public SubgraphProperty {
+class SgDNNLTransformerValAttProperty : public SubgraphProperty {
  public:
-  SgMKLDNNTransformerValAttProperty() {}
+  SgDNNLTransformerValAttProperty() {}
 
   static SubgraphPropertyPtr Create() {
-    static const std::string& name = "MKLDNN Transformer optimization pass";
-    auto property                  = std::make_shared<SgMKLDNNTransformerValAttProperty>();
+    static const std::string& name = "DNNL Transformer optimization pass";

Review comment:
       ```suggestion
       static const std::string& name = "oneDNN Transformer optimization pass";
   ```

##########
File path: src/operator/subgraph/dnnl/dnnl_conv.cc
##########
@@ -686,23 +682,22 @@ static bool SgMKLDNNConvOpStorageType(const nnvm::NodeAttrs& attrs,
   }
 }
 
-std::vector<std::pair<int, int>> SgMKLDNNConvInplaceOption(const NodeAttrs& attrs) {
-  auto const& param = nnvm::get<MKLDNNConvFusionParam>(attrs.parsed);
-  if (param.full_conv_param.mkldnn_param.with_sum &&
-      !param.full_conv_param.mkldnn_param.dedup_sum) {
+std::vector<std::pair<int, int>> SgDNNLConvInplaceOption(const NodeAttrs& attrs) {
+  auto const& param = nnvm::get<DNNLConvFusionParam>(attrs.parsed);
+  if (param.full_conv_param.dnnl_param.with_sum && !param.full_conv_param.dnnl_param.dedup_sum) {
     return std::vector<std::pair<int, int>>{{GetInSumIndex(param), 0}};
   } else {
     return std::vector<std::pair<int, int>>();
   }
 }
 
-nnvm::ObjectPtr SgMKLDNNConvQuantizedOp(const NodeAttrs& attrs) {
-  auto const& param    = nnvm::get<MKLDNNConvFusionParam>(attrs.parsed);
+nnvm::ObjectPtr SgDNNLConvQuantizedOp(const NodeAttrs& attrs) {
+  auto const& param    = nnvm::get<DNNLConvFusionParam>(attrs.parsed);
   nnvm::ObjectPtr node = nnvm::Node::Create();
-  node->attrs.op       = Op::Get("_sg_mkldnn_conv");
+  node->attrs.op       = Op::Get("_sg_dnnl_conv");
   const int k_ndims    = param.full_conv_param.conv_param.kernel.ndim();
   CHECK(k_ndims == 2U || k_ndims == 3U)
-      << "Quantized Convolution of MKL-DNN supports 2D/3D kernel currently."
+      << "Quantized Convolution of DNNL supports 2D/3D kernel currently."

Review comment:
       ```suggestion
         << "Quantized Convolution of oneDNN supports 2D/3D kernel currently."
   ```



