diff --git a/eval/metrics.py b/eval/metrics.py
index 5ca2331..9719648 100644
--- a/eval/metrics.py
+++ b/eval/metrics.py
@@ -29,12 +29,12 @@ def f1_score(y_true, y_pred, labels, average):
         f_score = calculate_f1_score(precision, recall)
         f1_sum += f_score
         if average is None:
-            f1_score_list.append(Result(precision, recall, average, item['label'], round(f_score, 3)))
+            f1_score_list.append(Result(precision, recall, average, item['label'], round(f_score, 4)))
 
     if average is None:
         return f1_score_list
     elif average == const.AVG_MACRO:
-        return [Result(None, None, average, None, round(f1_sum / len(pr_list), 3))]
+        return [Result(None, None, average, None, round(f1_sum / len(pr_list), 4))]
 
     elif average == const.AVG_MICRO:
         aggregate_tp = 0
@@ -51,7 +51,7 @@ def f1_score(y_true, y_pred, labels, average):
         agg_recall = get_recall(aggregate_tp, aggregate_fn)
         agg_f1_score = calculate_f1_score(agg_precision, agg_recall)
 
-        return [Result(agg_precision, agg_recall, average, None, round(agg_f1_score, 3))]
+        return [Result(agg_precision, agg_recall, average, None, round(agg_f1_score, 4))]
 
     return None
 
diff --git a/testing/eval_testing.py b/testing/eval_testing.py
index e6fa801..acf2c7d 100644
--- a/testing/eval_testing.py
+++ b/testing/eval_testing.py
@@ -1,5 +1,6 @@
 from eval.metrics import f1_score
 import utils.constants as const
+from sklearn.metrics import f1_score as f1
 
 y_true = ['positive', 'positive', 'negative', 'negative', 'positive', 'positive', 'negative', 'negative', 'positive', 'positive', 'negative', 'negative', 'positive', 'positive', 'negative', 'negative']
 y_pred = ['positive', 'negative', 'negative', 'positive', 'positive', 'negative', 'negative', 'positive', 'positive', 'negative', 'negative', 'positive', 'positive', 'negative', 'negative', 'negative']
@@ -9,7 +10,11 @@ result_list = f1_score(y_true, y_pred, ['positive', 'negative'], const.AVG_MICRO
 for result in result_list:
     result.print_result()
 
+print('SK Learn F1 Score (MICRO):: ', f1(y_true, y_pred, labels=['positive', 'negative'], average='micro'))
+
 result_list = f1_score(y_true, y_pred, ['positive', 'negative'], const.AVG_MACRO)
 
 for result in result_list:
     result.print_result()
+
+print('SK Learn F1 Score (MACRO):: ', f1(y_true, y_pred, labels=['positive', 'negative'], average='macro'))
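
For reference, a minimal, standalone sketch of the comparison the new print statements exercise (this assumes scikit-learn is installed; the label lists simply mirror the ones hard-coded in testing/eval_testing.py). With both classes listed in labels, micro-averaged F1 counts every prediction exactly once and therefore equals plain accuracy, while macro-averaged F1 is the unweighted mean of the per-class F1 scores; labels is passed by keyword because recent scikit-learn releases accept only y_true and y_pred positionally.

# Standalone sketch (assumes scikit-learn is installed); the label lists
# mirror the ones hard-coded in testing/eval_testing.py.
from sklearn.metrics import accuracy_score, f1_score

labels = ['positive', 'negative']
y_true = ['positive', 'positive', 'negative', 'negative'] * 4
y_pred = (['positive', 'negative', 'negative', 'positive'] * 3
          + ['positive', 'negative', 'negative', 'negative'])

micro = f1_score(y_true, y_pred, labels=labels, average='micro')
macro = f1_score(y_true, y_pred, labels=labels, average='macro')

# With every class included in `labels`, micro F1 counts each prediction
# exactly once, so it coincides with accuracy.
assert abs(micro - accuracy_score(y_true, y_pred)) < 1e-12

print('SK Learn F1 Score (MICRO):: ', round(micro, 4))
print('SK Learn F1 Score (MACRO):: ', round(macro, 4))

Rounding to four decimal places here matches the precision change made in eval/metrics.py.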