-
Notifications
You must be signed in to change notification settings - Fork 16
Expand file tree
/
Copy pathmagic_module.py
More file actions
110 lines (90 loc) · 4.03 KB
/
magic_module.py
File metadata and controls
110 lines (90 loc) · 4.03 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
import torch.nn as nn
from torch.nn import functional as F
from torch.nn.modules.utils import _pair
from pytorch_pretrained_bert.modeling import BertSelfAttention
import operator
import copy
class MagicModule(nn.Module):
    """Differentiable shadow copy of an ``nn.Module``.

    Recursively clones *module*, registering each parameter twice: the
    original tensor under ``'_origin_' + name`` (still a Parameter) and its
    data under the original name as a *buffer*. Because buffers are plain
    tensors, ``update_params`` can later rebind them to graph-connected
    expressions (``old + delta``) — the usual trick for meta-learning /
    hyper-gradient "virtual" parameter updates that must stay
    differentiable with respect to the deltas.

    ``forward`` dispatches to the wrapped class's own ``forward`` with this
    shadow instance standing in for ``self``; the helper methods below
    (for RNNBase, ModuleList, Conv2d, BatchNorm2d, BertSelfAttention)
    exist so those classes' ``forward`` implementations find the methods
    they expect to call on ``self``.
    """

    def __init__(self, module):
        # Only nn.Module's bookkeeping is wanted here; the wrapped type's
        # own __init__ is deliberately skipped (it may need ctor args).
        nn.Module.__init__(self)
        # Remember the wrapped class so forward() can dispatch to it.
        self._type = type(module)
        # Parameters: keep the original Parameter under '_origin_<name>'
        # and expose its tensor under '<name>' as a buffer.
        # NOTE: `value.data` shares storage with the original parameter
        # until update_params() rebinds the buffer to a new tensor.
        for key, value in module._parameters.items():
            if value is not None:
                self.register_parameter('_origin_' + key, value)
                self.register_buffer(key, value.data)
            else:
                self.register_buffer(key, None)
        # Plain buffers (e.g. BatchNorm running stats) are deep-copied so
        # the shadow module cannot mutate the original's state.
        for key, value in module._buffers.items():
            self.register_buffer(key, copy.deepcopy(value))
        # Recursively shadow every submodule.
        for key, value in module._modules.items():
            self.add_module(key, MagicModule(value))
        # Copy the remaining plain attributes (e.g. stride, padding,
        # num_attention_heads) that the wrapped forward() reads; skip
        # anything already claimed by the registrations above.
        for key, value in module.__dict__.items():
            if (not key in self.__dict__) and\
                    (not key in self._buffers) and\
                    (not key in self._modules):
                self.__setattr__(key, value)

    def forward(self, *args, **kwargs):
        # Call the wrapped class's forward unbound, with this shadow
        # instance substituted for `self`.
        return self._type.forward(self, *args, **kwargs)

    def update_params(self, deltas):
        """Apply additive parameter updates without breaking the graph.

        ``deltas`` maps dotted parameter paths to tensors. Dot-free keys
        rebind the local buffer to ``old + delta`` (out-of-place, so
        gradients can flow through the update); dotted keys are grouped
        by their first component and recursed into the child module.
        """
        sub_params = {}
        for key, delta in deltas.items():
            if not ('.' in key):
                self._buffers[key] = self._buffers[key] + delta
            else:
                attr = key.split('.')[0]
                if not (attr in sub_params):
                    sub_params[attr] = {}
                sub_params[attr]['.'.join(key.split('.')[1:])] = delta
        for key, value in sub_params.items():
            self._modules[key].update_params(value)

    def check_forward_args(self, *args, **kwargs):
        # RNNBase support: delegate input validation to the real class.
        assert issubclass(self._type, nn.RNNBase)
        return nn.RNNBase.check_forward_args(self, *args, **kwargs)

    @property
    def _flat_weights(self):
        # RNNBase support: flatten all_weights into the single list that
        # RNNBase's forward path expects.
        assert issubclass(self._type, nn.RNNBase)
        return [p for layerparams in self.all_weights for p in layerparams]

    @property
    def all_weights(self):
        # RNNBase support: resolve the weight-name table (_all_weights,
        # copied from the wrapped module's __dict__ in __init__) against
        # the *current* attributes, so post-update buffers are picked up.
        assert issubclass(self._type, nn.RNNBase)
        return [[getattr(self, weight) for weight in weights] for weights in
                self._all_weights]

    def _get_abs_string_index(self, idx):
        """Get the absolute index for the list of modules"""
        # ModuleList support: mirrors nn.ModuleList's index normalisation
        # (negative indices wrapped, result returned as the string key
        # used in self._modules).
        assert issubclass(self._type, nn.ModuleList)
        idx = operator.index(idx)
        if not (-len(self) <= idx < len(self)):
            raise IndexError('index {} is out of range'.format(idx))
        if idx < 0:
            idx += len(self)
        return str(idx)

    def __getitem__(self, idx):
        # ModuleList support: int indexing returns the child shadow module.
        assert issubclass(self._type, nn.ModuleList)
        if isinstance(idx, slice):
            # NOTE(review): this hands a plain list to MagicModule.__init__,
            # which expects a single nn.Module — slicing looks broken;
            # confirm against callers before relying on it.
            return self.__class__(list(self._modules.values())[idx])
        else:
            return self._modules[self._get_abs_string_index(idx)]

    def __len__(self):
        # ModuleList support.
        assert issubclass(self._type, nn.ModuleList)
        return len(self._modules)

    def transpose_for_scores(self, x):
        # BertSelfAttention support: split the last dimension into
        # (num_attention_heads, attention_head_size) and move the head
        # axis forward with permute(0, 2, 1, 3). Assumes x is
        # (batch, seq, hidden) — confirm at call sites.
        assert issubclass(self._type, BertSelfAttention)
        new_x_shape = x.size()[:-1] + (
            self.num_attention_heads, self.attention_head_size)
        x = x.view(*new_x_shape)
        return x.permute(0, 2, 1, 3)

    def conv2d_forward(self, input, weight):
        # Conv2d support: like nn.Conv2d's forward, but `weight` is passed
        # in explicitly so the (possibly updated) buffer tensor is used.
        assert issubclass(self._type, nn.Conv2d)
        if self.padding_mode == 'circular':
            # Circular padding is applied manually via F.pad; the conv
            # itself then runs with zero padding (_pair(0)).
            expanded_padding = ((self.padding[1] + 1) // 2, self.padding[1] // 2,
                                (self.padding[0] + 1) // 2, self.padding[0] // 2)
            return F.conv2d(F.pad(input, expanded_padding, mode='circular'),
                            weight, self.bias, self.stride,
                            _pair(0), self.dilation, self.groups)
        return F.conv2d(input, weight, self.bias, self.stride,
                        self.padding, self.dilation, self.groups)

    def _check_input_dim(self, input):
        # BatchNorm2d support: enforce 4-D (NCHW) input, matching the
        # check nn.BatchNorm2d performs.
        assert issubclass(self._type, nn.BatchNorm2d)
        if input.dim() != 4:
            raise ValueError('expected 4D input (got {}D input)'
                             .format(input.dim()))