-
Notifications
You must be signed in to change notification settings - Fork 5
/
Copy pathmetrics.py
50 lines (39 loc) · 1.61 KB
/
metrics.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
def confusion_matrix(pred, real):
    """
    Build a 2x2 confusion matrix for binary labels (1 = positive, else negative).

    Parameters
    ----------
    pred : sequence of int
        Predicted labels. Plain lists, numpy arrays and pandas Series all work.
    real : sequence of int
        Ground-truth labels, same length as ``pred``.

    Returns
    -------
    tuple
        ``(df, precision, recall, f1)`` where ``df`` is a DataFrame laid out as::

            |gt \\ pred | P | N |
            |-----------|---|---|
            | P         |TP |FN |
            | N         |FP |TN |

        TP: predicted 1 for class 1.    FP: predicted 1 for class 0.
        FN: predicted 0 for class 1.    TN: predicted 0 for class 0.

        precision = TP / (TP + FP): of the positive predictions, how many
        were actually positive.
        recall = TP / (TP + FN): of the actual positives, how many we found.
        f1: harmonic mean of precision and recall.

        Metrics are rounded to 5 decimals. A zero denominator (e.g. no
        positive predictions at all) yields 0.0 instead of raising.
    """
    tp = fp = fn = tn = 0
    # Count pairwise via zip: elementwise `==` on plain lists collapses to a
    # single bool and silently miscounts, so explicit iteration is safer and
    # works for lists, numpy arrays and pandas Series alike.
    for p, r in zip(pred, real):
        if p == 1:
            if r == 1:
                tp += 1
            else:
                fp += 1
        elif r == 1:
            fn += 1
        else:
            tn += 1
    labels = ['P', 'N']
    df = pd.DataFrame(data=[[tp, fn], [fp, tn]], index=labels, columns=labels)
    # Guard zero denominators instead of raising ZeroDivisionError / NaN.
    precision = tp / (tp + fp) if (tp + fp) else 0.0
    recall = tp / (tp + fn) if (tp + fn) else 0.0
    f1 = (2 * precision * recall / (precision + recall)
          if (precision + recall) else 0.0)
    return df, round(precision, 5), round(recall, 5), round(f1, 5)