"""Utilities for ADDA."""
import os
import random
import torch
import torch.backends.cudnn as cudnn
from torch.autograd import Variable
import params
from datasets import get_mnist, get_usps
def make_variable(tensor, volatile=False):
"""Convert Tensor to Variable."""
if torch.cuda.is_available():
tensor = tensor.cuda()
return Variable(tensor, volatile=volatile)
def make_cuda(tensor):
"""Use CUDA if it's available."""
if torch.cuda.is_available():
tensor = tensor.cuda()
return tensor
def denormalize(x, std, mean):
"""Invert normalization, and then convert array into image."""
out = x * std + mean
return out.clamp(0, 1)
def init_weights(layer):
"""Init weights for layers w.r.t. the original paper."""
layer_name = layer.__class__.__name__
if layer_name.find("Conv") != -1:
layer.weight.data.normal_(0.0, 0.02)
elif layer_name.find("BatchNorm") != -1:
layer.weight.data.normal_(1.0, 0.02)
layer.bias.data.fill_(0)
def init_random_seed(manual_seed):
"""Init random seed."""
seed = None
if manual_seed is None:
seed = random.randint(1, 10000)
else:
seed = manual_seed
print("use random seed: {}".format(seed))
random.seed(seed)
torch.manual_seed(seed)
if torch.cuda.is_available():
torch.cuda.manual_seed_all(seed)
def get_data_loader(name, train=True):
"""Get data loader by name."""
if name == "MNIST":
return get_mnist(train)
elif name == "USPS":
return get_usps(train)
def init_model(net, restore):
"""Init models with cuda and weights."""
# init weights of model
net.apply(init_weights)
# restore model weights
if restore is not None and os.path.exists(restore):
net.load_state_dict(torch.load(restore))
net.restored = True
print("Restore model from: {}".format(os.path.abspath(restore)))
# check if cuda is available
if torch.cuda.is_available():
cudnn.benchmark = True
net.cuda()
return net
def save_model(net, filename):
"""Save trained model."""
if not os.path.exists(params.model_root):
os.makedirs(params.model_root)
torch.save(net.state_dict(),
os.path.join(params.model_root, filename))
print("save pretrained model to: {}".format(os.path.join(params.model_root,
filename)))
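# A minimal usage sketch of the helpers above. Assumptions: `params` defines
# `manual_seed` and `model_root` (the names the functions above read), and
# `get_mnist` returns a standard DataLoader yielding (images, labels) batches.
# The stand-in network and filename below are illustrative only.
if __name__ == "__main__":
    import torch.nn as nn

    init_random_seed(getattr(params, "manual_seed", None))

    # Tiny stand-in network; the real project would pass its own encoder/classifier.
    net = init_model(nn.Sequential(nn.Linear(784, 10)), restore=None)

    # Fetch one source-domain batch, flatten it, and wrap it for the model.
    images, labels = next(iter(get_data_loader("MNIST", train=True)))
    images = make_variable(images.view(images.size(0), -1))
    logits = net(images)
    print("logits shape:", tuple(logits.size()))

    save_model(net, "utils-smoke-test.pt")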