-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathjudging_metrics.py
60 lines (47 loc) · 2 KB
/
judging_metrics.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
from keras.models import load_model
import os
import tensorflow as tf
from sklearn.metrics import precision_score, recall_score, f1_score
# Evaluate a saved binary classifier on the "predict" and "test" image
# folders and record precision / recall / F1 in judging_metrics_.txt.
IMAGE_SIZE = (160, 160)  # resolution the model expects — resize every batch to this
THRESHOLD = 0.5          # sigmoid cutoff separating class 0 from class 1
METRICS_PATH = 'judging_metrics_.txt'

model = load_model('models.h5', compile=False)


def _collect_predictions(dataset):
    """Run the model over every batch in *dataset*.

    Returns (y_true, y_pred): flat lists of ground-truth labels and hard
    0/1 predictions. The model emits one logit per image; sigmoid plus a
    0.5 threshold turns it into a class label.
    """
    y_true, y_pred = [], []
    for images, labels in dataset:
        images = tf.image.resize(images, IMAGE_SIZE)
        logits = model.predict_on_batch(images).flatten()
        probs = tf.nn.sigmoid(logits)
        classes = tf.where(probs < THRESHOLD, 0, 1)
        y_true.extend(labels.numpy())
        y_pred.extend(classes.numpy())
    return y_true, y_pred


def _evaluate_split(split_name, mode, trailer):
    """Score one dataset split and append the results to the metrics file.

    split_name: directory name of the split ('predict' or 'test'), also
                used as the section header in the output file.
    mode:       file-open mode — 'w' for the first split, 'a' afterwards.
    trailer:    text written after the F1 line ('\\n\\n' between sections,
                '' for the last one) — preserves the original file layout.
    """
    dataset = tf.keras.preprocessing.image_dataset_from_directory(
        os.path.join(split_name))
    y_true, y_pred = _collect_predictions(dataset)
    # Compute each metric exactly once (the original recomputed every
    # score a second time when writing the file).
    precision = precision_score(y_true, y_pred)
    recall = recall_score(y_true, y_pred)
    f1 = f1_score(y_true, y_pred)
    print('Precision: ', precision)
    print('Recall: ', recall)
    print('F1: ', f1)
    with open(METRICS_PATH, mode) as f:
        f.write(split_name + ' dataset' + '\n')
        f.write('Precision: ' + str(precision) + '\n')
        f.write('Recall: ' + str(recall) + '\n')
        f.write('F1: ' + str(f1) + trailer)


_evaluate_split('predict', 'w', '\n\n')
_evaluate_split('test', 'a', '')