RecallTarget

Bases: Metric

Source code in metrics_toolbox/metrics/classification/recall_target.py
class RecallTarget(Metric):
    """Recall metric scoped to a single target class of a label classifier."""

    _name = MetricNameEnum.RECALL
    _type = MetricTypeEnum.LABELS
    _scope = MetricScopeEnum.TARGET

    def __init__(self, target_name: str):
        """Initialize Recall metric for classification.

        Parameters
        ----------
        target_name : str
            Name of the target variable.
        """
        self.target_name = target_name

    @property
    def id(self) -> str:
        """Get the unique identifier for the metric.

        Returns
        -------
        str
            Unique identifier combining the metric name and the target name.
        """
        # Note: only name and target are combined; scope is not part of the id.
        return self.name.value + "_" + str(self.target_name)

    def compute(
        self, y_true: np.ndarray, y_pred: np.ndarray, column_names: list[str]
    ) -> MetricResult:
        """Compute recall for label classification.

        Parameters
        ----------
        y_true : array-like of shape (n_samples, n_classes)
            True binary labels in one-hot encoded format.
        y_pred : array-like of shape (n_samples, n_classes)
            Predicted binary labels in one-hot encoded format.
        column_names : list[str]
            Class names corresponding to column indices.

        Returns
        -------
        MetricResult
            The computed recall metric result.

        Raises
        ------
        ValueError
            If ``self.target_name`` is not present in ``column_names``.
        """
        target_index = column_names.index(self.target_name)

        true_col = y_true[:, target_index]
        pred_col = y_pred[:, target_index]

        # Vectorized counts: builtin sum() over a numpy array iterates in
        # Python; np.count_nonzero does the same reduction in C.
        tp_c = int(np.count_nonzero((pred_col == 1) & (true_col == 1)))
        fn_c = int(np.count_nonzero((pred_col == 0) & (true_col == 1)))

        # Guard against division by zero when the target class never occurs
        # in y_true; always return a plain Python float for consistency.
        recall_c = float(tp_c / (tp_c + fn_c)) if (tp_c + fn_c) > 0 else 0.0

        return MetricResult(
            name=self.name, scope=self.scope, type=self.type, value=recall_c
        )

id property

Get the unique identifier for the metric.

Returns:
  • str

    Unique identifier combining the metric name and the target name.

__init__(target_name)

Initialize Recall metric for classification.

Parameters:
  • target_name (str) –

    Name of the target variable.

Source code in metrics_toolbox/metrics/classification/recall_target.py
def __init__(self, target_name: str) -> None:
    """Initialize Recall metric for classification.

    Only stores the target class name; no validation is performed here —
    an unknown name surfaces later as a ``ValueError`` from
    ``column_names.index`` in ``compute``.

    Parameters
    ----------
    target_name : str
        Name of the target variable.
    """
    self.target_name = target_name

compute(y_true, y_pred, column_names)

Compute recall for label classification.

Parameters:
  • y_true (array-like of shape (n_samples, n_classes)) –

    True binary labels in one-hot encoded format.

  • y_pred (array-like of shape (n_samples, n_classes)) –

    Predicted binary labels in one-hot encoded format.

  • column_names (list[str]) –

    Class names corresponding to column indices.

Returns:
  • MetricResult –

    The computed recall metric result.
Source code in metrics_toolbox/metrics/classification/recall_target.py
def compute(
    self, y_true: np.ndarray, y_pred: np.ndarray, column_names: list[str]
) -> MetricResult:
    """Compute recall for label classification.

    Parameters
    ----------
    y_true : array-like of shape (n_samples, n_classes)
        True binary labels in one-hot encoded format.
    y_pred : array-like of shape (n_samples, n_classes)
        Predicted binary labels in one-hot encoded format.
    column_names : list[str]
        Class names corresponding to column indices.

    Returns
    -------
    MetricResult
        The computed recall metric result.

    Raises
    ------
    ValueError
        If ``self.target_name`` is not present in ``column_names``.
    """
    target_index = column_names.index(self.target_name)

    true_col = y_true[:, target_index]
    pred_col = y_pred[:, target_index]

    # Vectorized counts: builtin sum() over a numpy array iterates in
    # Python; np.count_nonzero does the same reduction in C.
    tp_c = int(np.count_nonzero((pred_col == 1) & (true_col == 1)))
    fn_c = int(np.count_nonzero((pred_col == 0) & (true_col == 1)))

    # Guard against division by zero when the target class never occurs
    # in y_true; always return a plain Python float for consistency.
    recall_c = float(tp_c / (tp_c + fn_c)) if (tp_c + fn_c) > 0 else 0.0

    return MetricResult(
        name=self.name, scope=self.scope, type=self.type, value=recall_c
    )