grad_mode.py
import torch
import functools


class no_grad(object):
    r"""Context-manager that disables gradient calculation.

    Disabling gradient calculation is useful for inference, when you are sure
    that you will not call :meth:`Tensor.backward()`. It will reduce memory
    consumption for computations that would otherwise have
    ``requires_grad=True``. In this mode, the result of every computation
    will have ``requires_grad=False``, even when the inputs have
    ``requires_grad=True``.

    Also functions as a decorator.

    Example::

        >>> x = torch.tensor([1.], requires_grad=True)
        >>> with torch.no_grad():
        ...     y = x * 2
        >>> y.requires_grad
        False
        >>> @torch.no_grad()
        ... def doubler(x):
        ...     return x * 2
        >>> z = doubler(x)
        >>> z.requires_grad
        False
    """

    def __enter__(self):
        # Save the current grad mode so it can be restored on exit.
        self.prev = torch.is_grad_enabled()
        torch._C.set_grad_enabled(False)

    def __exit__(self, *args):
        torch.set_grad_enabled(self.prev)
        return False  # do not suppress exceptions

    def __call__(self, func):
        # Decorator form: wrap ``func`` so its body runs inside this
        # context manager.
        @functools.wraps(func)
        def decorate_no_grad(*args, **kwargs):
            with self:
                return func(*args, **kwargs)
        return decorate_no_grad
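
# --- Illustrative sketch (not part of the original module) ----------------
# Because ``__exit__`` restores the saved ``prev`` flag instead of
# unconditionally re-enabling grad, nested ``no_grad`` blocks unwind
# correctly. A minimal demonstration, assuming grad mode was enabled
# beforehand (the default):
def _no_grad_nesting_sketch():
    x = torch.tensor([1.], requires_grad=True)
    with no_grad():
        with no_grad():
            pass                    # inner exit restores prev=False
        y = x * 2                   # still inside the outer block
    assert not y.requires_grad      # grad stayed disabled throughout
    assert torch.is_grad_enabled()  # outer exit restored the prior state
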
class enable_grad(object):
    r"""Context-manager that enables gradient calculation.

    Enables gradient calculation inside a :class:`~no_grad` context. This has
    no effect outside of :class:`~no_grad`.

    Also functions as a decorator.

    Example::

        >>> x = torch.tensor([1.], requires_grad=True)
        >>> with torch.no_grad():
        ...     with torch.enable_grad():
        ...         y = x * 2
        >>> y.requires_grad
        True
        >>> y.backward()
        >>> x.grad
        tensor([2.])
        >>> @torch.enable_grad()
        ... def doubler(x):
        ...     return x * 2
        >>> with torch.no_grad():
        ...     z = doubler(x)
        >>> z.requires_grad
        True
    """

    def __enter__(self):
        # Save the current grad mode so it can be restored on exit.
        self.prev = torch.is_grad_enabled()
        torch._C.set_grad_enabled(True)

    def __exit__(self, *args):
        torch.set_grad_enabled(self.prev)
        return False  # do not suppress exceptions

    def __call__(self, func):
        # Decorator form: wrap ``func`` so its body runs inside this
        # context manager.
        @functools.wraps(func)
        def decorate_enable_grad(*args, **kwargs):
            with self:
                return func(*args, **kwargs)
        return decorate_enable_grad
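
# --- Illustrative sketch (not part of the original module) ----------------
# ``enable_grad`` only has a visible effect inside a ``no_grad`` region,
# where it flips the thread-local flag back on so a graph is recorded for
# that span alone. A minimal demonstration:
def _enable_grad_sketch():
    x = torch.tensor([1.], requires_grad=True)
    with no_grad():
        with enable_grad():
            y = x * 2               # graph is recorded for this line
        z = x * 2                   # back under no_grad: no graph
    y.backward()                    # works: y was built with grad enabled
    assert x.grad is not None
    assert not z.requires_grad
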
class set_grad_enabled(object):
    r"""Context-manager that sets gradient calculation to on or off.

    ``set_grad_enabled`` will enable or disable grads based on its argument
    :attr:`mode`. It can be used as a context-manager or as a function.

    Arguments:
        mode (bool): Flag whether to enable grad (``True``), or disable
            (``False``). This can be used to conditionally enable
            gradients.

    Example::

        >>> x = torch.tensor([1.], requires_grad=True)
        >>> is_train = False
        >>> with torch.set_grad_enabled(is_train):
        ...     y = x * 2
        >>> y.requires_grad
        False
        >>> torch.set_grad_enabled(True)
        >>> y = x * 2
        >>> y.requires_grad
        True
        >>> torch.set_grad_enabled(False)
        >>> y = x * 2
        >>> y.requires_grad
        False
    """

    def __init__(self, mode):
        # The flag is flipped immediately, which is what makes the plain
        # function-call form work; the previous mode is saved so the
        # context-manager form can restore it on exit.
        self.prev = torch.is_grad_enabled()
        torch._C.set_grad_enabled(mode)

    def __enter__(self):
        pass

    def __exit__(self, *args):
        torch.set_grad_enabled(self.prev)
        return False  # do not suppress exceptions
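
# --- Illustrative sketch (not part of the original module) ----------------
# Because ``__init__`` flips the global flag immediately, ``set_grad_enabled``
# works both as a plain call and as a context manager; only the context form
# restores the prior state on exit. A minimal demonstration:
def _set_grad_enabled_sketch(is_train):
    x = torch.tensor([1.], requires_grad=True)
    with set_grad_enabled(is_train):
        y = x * 2                   # grad tracking follows ``is_train``
    return y.requires_grad == is_train  # True for both True and False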