roywei commented on a change in pull request #14685: [Fit API] improve event handlers
URL: https://github.com/apache/incubator-mxnet/pull/14685#discussion_r277092803
 
 

 ##########
 File path: python/mxnet/gluon/contrib/estimator/estimator.py
 ##########
 @@ -148,63 +145,70 @@ def _is_initialized(self):
                 return False
         return True
 
-    def _batch_fn(self, batch, ctx, is_iterator=False):
-        if is_iterator:
-            data = batch.data[0]
-            label = batch.label[0]
-        else:
-            data = batch[0]
-            label = batch[1]
+    def _get_data_and_label(self, batch, ctx):
+        data = batch[0]
+        label = batch[1]
         data = gluon.utils.split_and_load(data, ctx_list=ctx, batch_axis=0)
         label = gluon.utils.split_and_load(label, ctx_list=ctx, batch_axis=0)
         return data, label
 
+    def prepare_loss_and_metrics(self):
+        """
+        Based on the loss functions and training metrics in the estimator,
+        create metric wrappers to record loss values and create copies of
+        the train loss/metric objects to record validation values.
+        """
+        if any(not hasattr(self, attribute) for attribute in
+               ['train_metrics', 'val_metrics']):
+            # Use default mx.metric.Accuracy() for gluon.loss.SoftmaxCrossEntropyLoss()
+            if not self.train_metrics and any([isinstance(l, gluon.loss.SoftmaxCrossEntropyLoss) for l in self.loss]):
+                self.train_metrics = [Accuracy()]
+            self.val_metrics = []
+            for loss in self.loss:
+                self.train_metrics.append(Loss("Train " + ''.join([i for i in loss.name if not i.isdigit()])))
+                self.val_metrics.append(Loss("Validation " + ''.join([i for i in loss.name if not i.isdigit()])))
+            for metric in self.train_metrics:
+                val_metric = copy.deepcopy(metric)
+                metric.name = "Train " + metric.name
+                val_metric.name = "Validation " + val_metric.name
+                self.val_metrics.append(val_metric)
+        return self.train_metrics, self.val_metrics
+
     def evaluate(self,
                  val_data,
-                 batch_fn=None):
+                 val_metrics):
         """Evaluate model on validation data
 
          Parameters
          ----------
          val_data : DataLoader
              validation data with data and labels
-         batch_fn : function
-             custom batch function to extract data and label
-             from a data batch and load into contexts(devices)
+         val_metrics : EvalMetric or list of EvalMetrics
+             metrics to be updated with the validation results
          """
 
-        for metric in self.val_metrics + self.val_loss_metrics:
+        for metric in val_metrics:
             metric.reset()
 
         for _, batch in enumerate(val_data):
-            if not batch_fn:
-                if isinstance(val_data, gluon.data.DataLoader):
-                    data, label = self._batch_fn(batch, self.context)
-                else:
-                    raise ValueError("You are using a custom iteration, please also provide "
-                                     "batch_fn to extract data and label. Alternatively, you "
-                                     "can provide the data as gluon.data.DataLoader.")
-            else:
-                data, label = batch_fn(batch, self.context)
+            if not isinstance(val_data, gluon.data.DataLoader):
+                raise ValueError("Estimator only supports input as Gluon DataLoader. Alternatively, you "
+                                 "can transform your DataIter or any NDArray into Gluon DataLoader. "
+                                 "Refer to gluon.data.dataloader")
+            data, label = self._get_data_and_label(batch, self.context)
             pred = [self.net(x) for x in data]
-            losses = []
-            for loss in self.loss:
-                losses.append([loss(y_hat, y) for y_hat, y in zip(pred, label)])
+            loss = [self.loss[0](y_hat, y) for y_hat, y in zip(pred, label)]
 
 Review comment:
   Multi-loss will be supported in https://github.com/apache/incubator-mxnet/pull/14628; let's get the first version into master and iterate on that.
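
   For context on that follow-up, here is a minimal sketch of how the per-loss loop removed in this change could come back once multi-loss support lands in the PR linked above. The helper name _evaluate_batch_multi_loss and its signature are hypothetical and not part of this PR; the body only mirrors the removed code:

       def _evaluate_batch_multi_loss(net, loss_fns, data, label):
           # Hypothetical sketch, not part of this PR: compute every configured
           # loss on one batch, mirroring the per-loss loop removed above.
           pred = [net(x) for x in data]
           # one list of per-device loss values for each loss function
           return [[loss_fn(y_hat, y) for y_hat, y in zip(pred, label)]
                   for loss_fn in loss_fns]

   Until then, evaluate() computes only self.loss[0], and callers pass metrics explicitly, e.g. est.evaluate(val_data, val_metrics) with the val_metrics returned by prepare_loss_and_metrics() (est being an Estimator instance; the exact call site is outside this hunk).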
