Fix control lora on fp8.

comfyanonymous 2023-12-04 13:47:41 -05:00
parent be3468ddd5
commit 26b1c0a771
1 changed file with 2 additions and 2 deletions


@@ -208,7 +208,7 @@ class ControlLoraOps:
         def forward(self, input):
             if self.up is not None:
-                return torch.nn.functional.linear(input, self.weight.to(input.device) + (torch.mm(self.up.flatten(start_dim=1), self.down.flatten(start_dim=1))).reshape(self.weight.shape).type(input.dtype), self.bias)
+                return torch.nn.functional.linear(input, self.weight.to(input.dtype).to(input.device) + (torch.mm(self.up.flatten(start_dim=1), self.down.flatten(start_dim=1))).reshape(self.weight.shape).type(input.dtype), self.bias)
             else:
                 return torch.nn.functional.linear(input, self.weight.to(input.device), self.bias)
@@ -247,7 +247,7 @@ class ControlLoraOps:
         def forward(self, input):
             if self.up is not None:
-                return torch.nn.functional.conv2d(input, self.weight.to(input.device) + (torch.mm(self.up.flatten(start_dim=1), self.down.flatten(start_dim=1))).reshape(self.weight.shape).type(input.dtype), self.bias, self.stride, self.padding, self.dilation, self.groups)
+                return torch.nn.functional.conv2d(input, self.weight.to(input.dtype).to(input.device) + (torch.mm(self.up.flatten(start_dim=1), self.down.flatten(start_dim=1))).reshape(self.weight.shape).type(input.dtype), self.bias, self.stride, self.padding, self.dilation, self.groups)
             else:
                 return torch.nn.functional.conv2d(input, self.weight.to(input.device), self.bias, self.stride, self.padding, self.dilation, self.groups)
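
The change in both hunks is the same: the stored base weight is cast to the activation dtype with .to(input.dtype) before the LoRA delta is added to it. When the base weight is kept in fp8 (e.g. torch.float8_e4m3fn) while the activations and the up/down product are fp16/fp32, adding the two tensors directly does not work, so the weight has to be upcast first. Below is a minimal standalone sketch of the patched Linear path; the wrapper name patched_linear_forward and its explicit arguments are illustrative only and not part of the commit, and it assumes a PyTorch build with fp8 dtypes (>= 2.1).

# Minimal sketch of the patched Linear path (not the commit's actual class layout).
import torch

def patched_linear_forward(input, weight, up, down, bias=None):
    if up is not None:
        # Upcast the stored (possibly fp8) weight to the activation dtype first;
        # doing elementwise math directly on the fp8 weight is what broke
        # control loras on fp8.
        w = weight.to(input.dtype).to(input.device)
        delta = torch.mm(up.flatten(start_dim=1),
                         down.flatten(start_dim=1)).reshape(weight.shape).type(input.dtype)
        return torch.nn.functional.linear(input, w + delta, bias)
    return torch.nn.functional.linear(input, weight.to(input.device), bias)

# Usage sketch: fp8 base weight, fp32 LoRA factors and activations.
x = torch.randn(2, 16)
w = torch.randn(8, 16).to(torch.float8_e4m3fn)
up, down = torch.randn(8, 4), torch.randn(4, 16)
print(patched_linear_forward(x, w, up, down).shape)  # torch.Size([2, 8])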