-
Notifications
You must be signed in to change notification settings - Fork 44
/
train_rPPGNet
92 lines (67 loc) · 3.5 KB
/
train_rPPGNet
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
### it is just for research purpose, and commercial use is not allowed ###
import torch
from models.rPPGNet import *
''' ###############################################################
#
# Step 1: two loss function
# 1.1 nn.BCELoss() for skin segmentation
# 1.2 Neg_Pearson() for rPPG signal regression
#
''' ###############################################################
class Neg_Pearson(nn.Module):
    """Negative Pearson correlation loss for rPPG signal regression.

    For each sample in the batch, computes the Pearson correlation r between
    the predicted and ground-truth 1-D signals and penalizes (1 - r); the
    final loss is the mean over the batch.  Range is [0, 2]: 0 for perfect
    positive correlation, 2 for perfect anti-correlation.
    """

    def __init__(self):
        super(Neg_Pearson, self).__init__()

    def forward(self, preds, labels):
        """Compute the mean negative-Pearson loss.

        Args:
            preds:  float tensor of shape (batch, signal_length) — predicted rPPG.
            labels: float tensor of shape (batch, signal_length) — ground truth.

        Returns:
            Scalar tensor: mean of (1 - pearson_r) over the batch.

        Note:
            If a signal is constant the denominator is zero and the result
            is NaN — same behavior as the original per-sample loop.
        """
        N = preds.shape[1]                     # signal length per sample
        # Per-row sufficient statistics; vectorized over the batch instead
        # of a Python loop — identical arithmetic, one pass in C.
        sum_x = torch.sum(preds, dim=1)        # sum of x
        sum_y = torch.sum(labels, dim=1)       # sum of y
        sum_xy = torch.sum(preds * labels, dim=1)
        sum_x2 = torch.sum(torch.pow(preds, 2), dim=1)
        sum_y2 = torch.sum(torch.pow(labels, 2), dim=1)
        pearson = (N * sum_xy - sum_x * sum_y) / torch.sqrt(
            (N * sum_x2 - torch.pow(sum_x, 2)) * (N * sum_y2 - torch.pow(sum_y, 2))
        )
        return torch.mean(1 - pearson)
criterion_Binary = nn.BCELoss()    # binary cross-entropy for the predicted skin segmentation maps
criterion_Pearson = Neg_Pearson()  # negative-Pearson loss for the rPPG signal vs. ground-truth ECG
''' ###############################################################
#
# Step 2: Forward model and calculate the losses
# # inputs: facial frames --> [3, 64, 128, 128]
# skin_seg_label: binary skin labels --> [64, 64, 64]
# ecg: groundtruth smoothed ecg signals --> [64]
#
# 2.1 Forward the model, get the predicted skin maps and rPPG signals
# 2.2 Calculate the loss between predicted skin maps and binary skin labels (loss_binary)
# 2.3 Calculate the loss between predicted rPPG signals and groundtruth smoothed ecg signals (loss_ecg, loss_ecg1, loss_ecg2, loss_ecg3,## loss_ecg4, loss_ecg_aux)
#
''' ###############################################################
# Build rPPGNet and run one forward pass over a clip of facial frames.
model = rPPGNet()
(skin_map, rPPG_aux, rPPG, rPPG_SA1, rPPG_SA2,
 rPPG_SA3, rPPG_SA4, x_visual6464, x_visual3232) = model(inputs)

# Skin-segmentation loss against the binary skin labels.
loss_binary = criterion_Binary(skin_map, skin_seg_label)

# Zero-mean / unit-std normalization of every predicted rPPG signal
# before correlating it with the ground-truth ECG.
rPPG, rPPG_SA1, rPPG_SA2, rPPG_SA3, rPPG_SA4, rPPG_aux = [
    (sig - torch.mean(sig)) / torch.std(sig)
    for sig in (rPPG, rPPG_SA1, rPPG_SA2, rPPG_SA3, rPPG_SA4, rPPG_aux)
]

# Negative-Pearson losses: the final signal, the four spatial-attention
# branch signals, and the auxiliary mid-level signal.
loss_ecg = criterion_Pearson(rPPG, ecg)
loss_ecg1 = criterion_Pearson(rPPG_SA1, ecg)
loss_ecg2 = criterion_Pearson(rPPG_SA2, ecg)
loss_ecg3 = criterion_Pearson(rPPG_SA3, ecg)
loss_ecg4 = criterion_Pearson(rPPG_SA4, ecg)
loss_ecg_aux = criterion_Pearson(rPPG_aux, ecg)
''' ###############################################################
#
# Step 3: loss fusion and BP
#
''' ###############################################################
# Fuse the losses (segmentation at 0.1, auxiliary rPPG branches at 0.5,
# main rPPG loss at full weight) and backpropagate.
aux_sum = loss_ecg1 + loss_ecg2 + loss_ecg3 + loss_ecg4 + loss_ecg_aux
loss = 0.1 * loss_binary + 0.5 * aux_sum + loss_ecg
loss.backward()