diff --git a/imblearn/metrics/_regression.py b/imblearn/metrics/_regression.py
new file mode 100644
index 000000000..a9de8ecb2
--- /dev/null
+++ b/imblearn/metrics/_regression.py
@@ -0,0 +1,40 @@
+import numpy as np
+
+def macro_mean_squared_error(y_true, y_pred):
+    """
+    Compute the macro-averaged mean squared error.
+
+    Parameters
+    ----------
+    y_true : array-like of shape (n_samples, n_outputs)
+        True values.
+    y_pred : array-like of shape (n_samples, n_outputs)
+        Predicted values.
+
+    Returns
+    -------
+    float
+        Macro-averaged MSE over all outputs.
+
+    Raises
+    ------
+    ValueError
+        If ``y_true`` and ``y_pred`` do not have the same shape.
+    """
+    # asarray avoids an unnecessary copy when the input is already an ndarray.
+    y_true = np.asarray(y_true)
+    y_pred = np.asarray(y_pred)
+
+    # Guard against silent NumPy broadcasting of mismatched inputs,
+    # which would return a numerically valid but meaningless score.
+    if y_true.shape != y_pred.shape:
+        raise ValueError(
+            f"y_true and y_pred have incompatible shapes: "
+            f"{y_true.shape} vs {y_pred.shape}"
+        )
+
+    # Per-output MSE: average the squared error over samples (axis 0).
+    mse_per_output = np.mean((y_true - y_pred) ** 2, axis=0)
+
+    # Macro-average: unweighted mean over outputs; float() matches the
+    # documented return type (plain Python float, not np.float64).
+    return float(np.mean(mse_per_output))
diff --git a/imblearn/tests/test_regression.py b/imblearn/tests/test_regression.py
new file mode 100644
index 000000000..7807478b3
--- /dev/null
+++ b/imblearn/tests/test_regression.py
@@ -0,0 +1,17 @@
+import numpy as np
+import pytest
+from imblearn.metrics._regression import macro_mean_squared_error
+
+def test_macro_mse():
+    y_true = np.array([[1, 2], [3, 4]])
+    y_pred = np.array([[1, 1], [4, 4]])
+
+    result = macro_mean_squared_error(y_true, y_pred)
+    # Per-output MSE is [0.5, 0.5]; macro-average is 0.5.
+    expected = np.mean([np.mean([0, 1]), np.mean([1, 0])])  # manual calculation
+    assert np.isclose(result, expected)
+
+def test_macro_mse_shape_mismatch():
+    # Mismatched shapes must raise rather than silently broadcast.
+    with pytest.raises(ValueError):
+        macro_mean_squared_error([[1, 2]], [1, 2, 3])