Skip to content

Commit c4593f0

Browse files
authored
Fix conv weight tracking by registering the wrapped conv module's parameters on the ConvLoRA wrapper (#183)
1 parent 4c03338 commit c4593f0

1 file changed

Lines changed: 2 additions & 0 deletions

File tree

loralib/layers.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -247,6 +247,8 @@ class ConvLoRA(nn.Module, LoRALayer):
247247
def __init__(self, conv_module, in_channels, out_channels, kernel_size, r=0, lora_alpha=1, lora_dropout=0., merge_weights=True, **kwargs):
248248
super(ConvLoRA, self).__init__()
249249
self.conv = conv_module(in_channels, out_channels, kernel_size, **kwargs)
250+
for name, param in self.conv.named_parameters():
251+
self.register_parameter(name, param)
250252
LoRALayer.__init__(self, r=r, lora_alpha=lora_alpha, lora_dropout=lora_dropout, merge_weights=merge_weights)
251253
assert isinstance(kernel_size, int)
252254
# Actual trainable parameters

0 commit comments

Comments
 (0)