@@ -208,7 +208,7 @@ class ControlLoraOps:
 
         def forward(self, input):
             if self.up is not None:
-                return torch.nn.functional.linear(input, self.weight.to(input.device) + (torch.mm(self.up.flatten(start_dim=1), self.down.flatten(start_dim=1))).reshape(self.weight.shape).type(input.dtype), self.bias)
+                return torch.nn.functional.linear(input, self.weight.to(input.dtype).to(input.device) + (torch.mm(self.up.flatten(start_dim=1), self.down.flatten(start_dim=1))).reshape(self.weight.shape).type(input.dtype), self.bias)
             else:
                 return torch.nn.functional.linear(input, self.weight.to(input.device), self.bias)
@@ -247,7 +247,7 @@ class ControlLoraOps:
 
         def forward(self, input):
             if self.up is not None:
-                return torch.nn.functional.conv2d(input, self.weight.to(input.device) + (torch.mm(self.up.flatten(start_dim=1), self.down.flatten(start_dim=1))).reshape(self.weight.shape).type(input.dtype), self.bias, self.stride, self.padding, self.dilation, self.groups)
+                return torch.nn.functional.conv2d(input, self.weight.to(input.dtype).to(input.device) + (torch.mm(self.up.flatten(start_dim=1), self.down.flatten(start_dim=1))).reshape(self.weight.shape).type(input.dtype), self.bias, self.stride, self.padding, self.dilation, self.groups)
             else:
                 return torch.nn.functional.conv2d(input, self.weight.to(input.device), self.bias, self.stride, self.padding, self.dilation, self.groups)
 
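Both hunks make the same change: the stored base weight is cast to the input's dtype before the low-rank update is merged in, so the weight handed to F.linear / F.conv2d matches the activation dtype. Below is a minimal, self-contained sketch of the failure mode being fixed, assuming a float32 base weight and lower-precision activations; it is illustrative only and not part of the patch. bfloat16 is used so it runs on CPU, but the same type promotion problem occurs with float16 activations on GPU.

import torch

weight = torch.randn(8, 4)                      # base weight stored in float32
up = torch.randn(8, 2)                          # low-rank "up" factor
down = torch.randn(2, 4)                        # low-rank "down" factor
x = torch.randn(1, 4, dtype=torch.bfloat16)     # lower-precision activations

# Low-rank update, merged exactly as in the patched lines
# (flatten(start_dim=1) is a no-op for these 2-D factors).
delta = torch.mm(up.flatten(start_dim=1), down.flatten(start_dim=1)).reshape(weight.shape)

# Old behaviour: float32 weight + bfloat16 delta promotes to float32,
# so F.linear sees mismatched dtypes and raises a RuntimeError.
try:
    merged = weight.to(x.device) + delta.type(x.dtype)
    torch.nn.functional.linear(x, merged)
except RuntimeError as e:
    print("before the fix:", e)

# New behaviour: casting the base weight to input.dtype first keeps the
# merged weight in bfloat16, matching the activations.
merged = weight.to(x.dtype).to(x.device) + delta.type(x.dtype)
print("after the fix:", torch.nn.functional.linear(x, merged).dtype)  # torch.bfloat16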