@@ -491,21 +610,14 @@ class ChunkEvaluator(MetricBase):
class EditDistance(MetricBase):
"""
Edit distance is a way of quantifying how dissimilar two strings
(e.g., words) are to each other by counting the minimum number
of edit operations (add, remove or replace) required to transform
one string into the other.
Refer to https://en.wikipedia.org/wiki/Edit_distance
This EditDistance class takes two inputs by using the update function:
1. distances: a (batch_size, 1) numpy.array, each element represents the
edit distance between two sequences.
2. seq_num: an int|float value, standing for the number of sequence pairs.
and returns the overall edit distance of multiple sequence-pairs.
This API is for the management of edit distances.
Edit distance is a method to quantify the degree of dissimilarity
between two strings, such as words, by calculating the minimum number of
edit operations (add, delete or replace) required to transform one string into the other.
Refer to https://en.wikipedia.org/wiki/Edit_distance.
Args:
name: the metric name
name (str, optional): Metric name. For details, please refer to :ref:`api_guide_Name`. Default is None.
Examples:
.. code-block:: python
...
...
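To make the definition above concrete, here is a minimal, self-contained Levenshtein implementation of the add/delete/replace counting the docstring describes. It is an illustrative sketch, not code from this module:

.. code-block:: python

    def levenshtein(a, b):
        # dp[j] holds the edit distance between the current prefix of a
        # and b[:j]; it starts as the distance from the empty string.
        dp = list(range(len(b) + 1))
        for i in range(1, len(a) + 1):
            prev, dp[0] = dp[0], i
            for j in range(1, len(b) + 1):
                cur = dp[j]
                cost = 0 if a[i - 1] == b[j - 1] else 1
                dp[j] = min(dp[j] + 1,      # remove a[i-1]
                            dp[j - 1] + 1,  # add b[j-1]
                            prev + cost)    # replace (or match)
                prev = cur
        return dp[-1]

    levenshtein("kitten", "sitting")  # 3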
@@ -556,10 +668,8 @@ class EditDistance(MetricBase):
Update the overall edit distance.
Args:
distances: a (batch_size, 1) numpy.array, each element represents the
edit distance between two sequences.
seq_num: an int|float value, standing for the number of sequence pairs.
distances (numpy.array): a (batch_size, 1) numpy.array, each element represents the edit distance between two sequences.
seq_num (int|float): the number of sequence pairs.
"""
if not _is_numpy_(distances):
    raise ValueError("The 'distances' must be a numpy ndarray.")
...
...
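A minimal usage sketch for the interface documented above, assuming the fluid.metrics layout of this file; the batch of distances is made up (in practice it would typically come from an edit-distance layer), and the exact return value of eval() is not shown in this hunk:

.. code-block:: python

    import numpy as np
    import paddle.fluid as fluid

    # Made-up batch: edit distances for four sequence pairs.
    distances = np.array([[1.0], [0.0], [2.0], [3.0]])

    metric = fluid.metrics.EditDistance(name='edit_distance')
    metric.update(distances, seq_num=4)
    # eval() aggregates the state accumulated by update() calls.
    print(metric.eval())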
@@ -589,7 +699,7 @@ class EditDistance(MetricBase):
class Auc(MetricBase):
"""
The auc metric is for binary classification.
Refer to https://en.wikipedia.org/wiki/Receiver_operating_characteristic#Area_under_the_curve
Refer to https://en.wikipedia.org/wiki/Receiver_operating_characteristic#Area_under_the_curve.
Please note that the auc metric is implemented in Python, which may be a little bit slow.
If you are concerned about speed, please use fluid.layers.auc instead.
...
...
@@ -602,9 +712,8 @@ class Auc(MetricBase):
computed using the height of the precision values by the recall.
Args:
name: metric name
curve: Specifies the name of the curve to be computed, 'ROC' [default] or
'PR' for the Precision-Recall-curve.
name (str, optional): Metric name. For details, please refer to :ref:`api_guide_Name`. Default is None.
curve (str): Specifies the name of the curve to be computed, 'ROC' [default] or 'PR' for the Precision-Recall curve.
NOTE: Only the ROC curve type is implemented in Python for now.
...
...
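As a sketch of the constructor arguments documented above (keyword names follow this docstring; the default is assumed):

.. code-block:: python

    import paddle.fluid as fluid

    # 'ROC' is the default; 'PR' is accepted by the signature but,
    # per the note above, only the ROC type is implemented in Python.
    auc_metric = fluid.metrics.Auc(name='auc', curve='ROC')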
@@ -645,13 +754,11 @@ class Auc(MetricBase):
def update(self, preds, labels):
"""
Update the auc curve with the given predictions and labels
Update the auc curve with the given predictions and labels.
Args:
preds: a numpy array in the shape of (batch_size, 2), preds[i][j] denotes the probability
of classifying the instance i into the class j.
labels: a numpy array in the shape of (batch_size, 1), labels[i] is either 0 or 1, representing
the label of the instance i.
preds (numpy.array): a numpy array in the shape of (batch_size, 2), preds[i][j] denotes the probability of classifying the instance i into the class j.
labels (numpy.array): a numpy array in the shape of (batch_size, 1), labels[i] is either 0 or 1, representing the label of the instance i.
"""
if not _is_numpy_(labels):
    raise ValueError("The 'labels' must be a numpy ndarray.")
...
...
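A usage sketch matching the shapes documented above; the batch is hypothetical:

.. code-block:: python

    import numpy as np
    import paddle.fluid as fluid

    # Hypothetical batch of four binary-classification instances.
    preds = np.array([[0.2, 0.8],
                      [0.9, 0.1],
                      [0.4, 0.6],
                      [0.7, 0.3]])   # preds[i][1]: probability of class 1
    labels = np.array([[1], [0], [1], [0]])

    auc_metric = fluid.metrics.Auc(name='auc')
    auc_metric.update(preds, labels)
    print(auc_metric.eval())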
@@ -674,6 +781,9 @@ class Auc(MetricBase):
def eval(self):
"""
Return the area (a float score) under the auc curve.
Return:
    float: the area under the auc curve
"""
tot_pos = 0.0
tot_neg = 0.0
...
...
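The lines shown here only initialize the positive/negative totals. For reference, a self-contained sketch of the trapezoidal area computation that an ROC-based eval() typically performs, on hypothetical data and independent of this class's internal threshold buckets:

.. code-block:: python

    import numpy as np

    def trapezoid_auc(fpr, tpr):
        # Points must be ordered by ascending false-positive rate.
        return float(np.trapz(tpr, fpr))

    fpr = np.array([0.0, 0.1, 0.4, 1.0])
    tpr = np.array([0.0, 0.6, 0.9, 1.0])
    trapezoid_auc(fpr, tpr)  # 0.825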
@@ -864,7 +974,6 @@ class DetectionMAP(object):
def reset(self, executor, reset_program=None):
"""
Reset metric states at the beginning of each pass/user-specified batch.
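A hedged usage sketch: reset() runs a reset program on the given executor; map_eval below stands for a previously constructed fluid.metrics.DetectionMAP instance (its construction is not shown in this hunk):

.. code-block:: python

    import paddle.fluid as fluid

    exe = fluid.Executor(fluid.CPUPlace())
    # map_eval: assumed to be a fluid.metrics.DetectionMAP built earlier
    # from detection outputs and ground truth.
    map_eval.reset(exe)  # clears accumulated states before the next pass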