Comparison with sklearn metrics done - testing

pavan
Pavan Mandava 6 years ago
parent 6410dda0cb
commit 3fe33ab51a

@@ -29,12 +29,12 @@ def f1_score(y_true, y_pred, labels, average):
         f_score = calculate_f1_score(precision, recall)
         f1_sum += f_score
         if average is None:
-            f1_score_list.append(Result(precision, recall, average, item['label'], round(f_score, 3)))
+            f1_score_list.append(Result(precision, recall, average, item['label'], round(f_score, 4)))
     if average is None:
         return f1_score_list
     elif average == const.AVG_MACRO:
-        return [Result(None, None, average, None, round(f1_sum / len(pr_list), 3))]
+        return [Result(None, None, average, None, round(f1_sum / len(pr_list), 4))]
     elif average == const.AVG_MICRO:
         aggregate_tp = 0
@@ -51,7 +51,7 @@ def f1_score(y_true, y_pred, labels, average):
         agg_recall = get_recall(aggregate_tp, aggregate_fn)
         agg_f1_score = calculate_f1_score(agg_precision, agg_recall)
-        return [Result(agg_precision, agg_recall, average, None, round(agg_f1_score, 3))]
+        return [Result(agg_precision, agg_recall, average, None, round(agg_f1_score, 4))]
     return None
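For context, here is a minimal self-contained sketch (plain Python, not the repository's eval.metrics code; calculate_f1_score, get_recall, Result and pr_list are not reproduced) of the micro vs. macro aggregation these hunks touch:

# Sketch only: macro F1 averages per-label F1 scores; micro F1 pools TP/FP/FN first.
def _f1(precision, recall):
    return 0.0 if (precision + recall) == 0 else 2 * precision * recall / (precision + recall)

def micro_macro_f1(y_true, y_pred, labels):
    per_label_f1, tp, fp, fn = [], 0, 0, 0
    for label in labels:
        l_tp = sum(t == label and p == label for t, p in zip(y_true, y_pred))
        l_fp = sum(t != label and p == label for t, p in zip(y_true, y_pred))
        l_fn = sum(t == label and p != label for t, p in zip(y_true, y_pred))
        precision = l_tp / (l_tp + l_fp) if (l_tp + l_fp) else 0.0
        recall = l_tp / (l_tp + l_fn) if (l_tp + l_fn) else 0.0
        per_label_f1.append(_f1(precision, recall))    # macro: keep per-label F1
        tp, fp, fn = tp + l_tp, fp + l_fp, fn + l_fn   # micro: pool the counts
    macro = round(sum(per_label_f1) / len(labels), 4)  # average of per-label F1
    micro_p = tp / (tp + fp) if (tp + fp) else 0.0
    micro_r = tp / (tp + fn) if (tp + fn) else 0.0
    micro = round(_f1(micro_p, micro_r), 4)            # F1 of pooled counts
    return micro, macro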

@@ -1,5 +1,6 @@
 from eval.metrics import f1_score
 import utils.constants as const
+from sklearn.metrics import f1_score as f1
 y_true = ['positive', 'positive', 'negative', 'negative', 'positive', 'positive', 'negative', 'negative', 'positive', 'positive', 'negative', 'negative', 'positive', 'positive', 'negative', 'negative']
 y_pred = ['positive', 'negative', 'negative', 'positive', 'positive', 'negative', 'negative', 'positive', 'positive', 'negative', 'negative', 'positive', 'positive', 'negative', 'negative', 'negative']
@@ -9,7 +10,11 @@ result_list = f1_score(y_true, y_pred, ['positive', 'negative'], const.AVG_MICRO
 for result in result_list:
     result.print_result()
+print('SK Learn F1 Score (MICRO):: ', f1(y_true, y_pred, ['positive', 'negative'], average='micro'))
 result_list = f1_score(y_true, y_pred, ['positive', 'negative'], const.AVG_MACRO)
 for result in result_list:
     result.print_result()
+print('SK Learn F1 Score (MACRO):: ', f1(y_true, y_pred, ['positive', 'negative'], average='macro'))
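The test file above only prints both results side by side. A hedged sketch of turning that into an automated check is below; it reuses the micro_macro_f1 sketch from earlier (run in the same session) rather than the project's Result objects, whose attribute names are not shown in this diff, and the tolerance value is an assumption:

# Cross-check the sketch against scikit-learn; not the repository's test file.
import math
from sklearn.metrics import f1_score as sk_f1

y_true = ['positive', 'positive', 'negative', 'negative', 'positive', 'positive', 'negative', 'negative']
y_pred = ['positive', 'negative', 'negative', 'positive', 'positive', 'negative', 'negative', 'positive']
labels = ['positive', 'negative']

micro, macro = micro_macro_f1(y_true, y_pred, labels)
assert math.isclose(micro, sk_f1(y_true, y_pred, labels=labels, average='micro'), abs_tol=1e-3)
assert math.isclose(macro, sk_f1(y_true, y_pred, labels=labels, average='macro'), abs_tol=1e-3)
print('micro:', micro, 'macro:', macro)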
