piiswrong closed pull request #6496: Add hinge loss for svm URL: https://github.com/apache/incubator-mxnet/pull/6496
This is a PR merged from a forked repository. As GitHub hides the original diff on merge, it is displayed below for the sake of provenance: As this is a foreign pull request (from a fork), the diff is supplied below (as it won't show otherwise due to GitHub magic): diff --git a/python/mxnet/metric.py b/python/mxnet/metric.py index c57f12fc5d..fb91c30cdb 100644 --- a/python/mxnet/metric.py +++ b/python/mxnet/metric.py @@ -941,6 +941,70 @@ def __init__(self, name='caffe', name, output_names=output_names, label_names=label_names) +@register +class HingeLoss(EvalMetric): + """Computes Hinge loss for SVM. + + The hinge loss for one example: + + .. math:: + L_i = \\sum_{j\\neq y_i}\\max(0, w_j^T x_i - w_{y_i}^T x_i + margin) + + Parameters + ---------- + name : str + Name of this metric instance for display. + use_linear : boolean, Optional + Whether to use the linear (L1) hinge loss rather than the squared (L2) hinge loss (the default is False). + margin : float, Optional + Margin for the SVM (the default is 1.0). + output_names : list of str, or None + Name of predictions that should be used when updating with update_dict. + By default include all predictions. + label_names : list of str, or None + Name of labels that should be used when updating with update_dict. + By default include all labels. 
+ + Examples + -------- + >>> predicts = [mx.nd.array(np.array([-1.73, -1.24, 0.89, -0.99, 0.05]).reshape(1, -1))] + >>> labels = [mx.nd.array(np.array([2]))] + >>> hinge_loss_l1 = mx.metric.HingeLoss(use_linear=True) + >>> hinge_loss_l1.update(labels = labels, preds = predicts) + >>> print hinge_loss_l1.get() + ('hinge_loss', 0.1600000262260437) + >>> hinge_loss_l2 = mx.metric.HingeLoss() + >>> hinge_loss_l2.update(labels = labels, preds = predicts) + >>> print hinge_loss_l2.get() + ('hinge_loss', 0.02560000866651535) + """ + def __init__(self, name='hinge_loss', use_linear=False, margin=1.0, + output_names=None, label_names=None): + super(HingeLoss, self).__init__( + name, output_names=output_names, label_names=label_names) + self.use_linear = use_linear + self.margin = margin + + def norm(self, x): + return x if self.use_linear else x**2.0 + + def update(self, labels, preds): + check_label_shapes(labels, preds) + + for label, pred in zip(labels, preds): + n = label.shape[0] + pred = pred.asnumpy() + label = label.asnumpy().astype('int32') + + pred = pred - pred[numpy.arange(n), label].reshape(-1, 1) + self.margin + pred[numpy.arange(n), label] = 0 + + loss = numpy.maximum(0, pred) + + self.sum_metric += numpy.sum(self.norm(loss)) + self.num_inst += n + + @register class CustomMetric(EvalMetric): """Computes a customized evaluation metric. ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: us...@infra.apache.org With regards, Apache Git Services