File size: 1,180 Bytes
711e816
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
import argparse
import csv

from sklearn.metrics import accuracy_score, confusion_matrix

# Command-line interface: the two CSV files to compare.
parser = argparse.ArgumentParser()

# Both flags are mandatory; argparse exits with usage text if either is missing.
parser.add_argument("--ground-truth", required=True)
parser.add_argument("--predictions", required=True)

args = parser.parse_args()

ground_truth_csv = args.ground_truth
predictions_csv = args.predictions


def _read_label_csv(path):
    """Read a (path, label) CSV into a dict, skipping the header row.

    Only the first two columns are used; column names in the header are
    ignored (positional, matching the original behavior). If the same
    sample path appears more than once, the last row wins.

    :param path: filesystem path of the CSV file to read.
    :return: dict mapping sample path (column 0) to label (column 1).
    """
    labels = {}
    with open(path, newline="") as f:
        reader = csv.reader(f)
        # Discard the header; the None default avoids a bare StopIteration
        # on a completely empty file.
        next(reader, None)
        for row in reader:
            labels[row[0]] = row[1]
    return labels


# Mapping of sample path -> label for each input file. The reading logic
# was duplicated verbatim for both files; it now lives in one helper.
gt = _read_label_csv(ground_truth_csv)

preds = _read_label_csv(predictions_csv)


# Score only samples present in BOTH files; ground-truth entries with no
# matching prediction are silently skipped (best-effort alignment).
# Iteration follows gt's insertion order, keeping the pair lists aligned.
shared_paths = [path for path in gt if path in preds]

y_true = [gt[path] for path in shared_paths]
y_pred = [preds[path] for path in shared_paths]


# Overall accuracy and the per-class confusion matrix over the aligned pairs.
acc = accuracy_score(y_true, y_pred)
cm = confusion_matrix(y_true, y_pred)

print("Accuracy:", acc)
print("Confusion matrix:")
# One matrix row per line, cells separated by single spaces.
for row in cm:
    print(" ".join(map(str, row)))