From 6fb084f39de495ca2cdfd482720069378d6e68ae Mon Sep 17 00:00:00 2001
From: comfyanonymous
Date: Sat, 15 Jul 2023 00:45:38 -0400
Subject: [PATCH] Reduce floating point rounding errors in loras.

---
 comfy/sd.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/comfy/sd.py b/comfy/sd.py
index bef4e8ef..0cd75833 100644
--- a/comfy/sd.py
+++ b/comfy/sd.py
@@ -342,7 +342,9 @@ class ModelPatcher:
             if key not in self.backup:
                 self.backup[key] = weight.clone()
 
-            weight[:] = self.calculate_weight(self.patches[key], weight.clone(), key)
+            temp_weight = weight.to(torch.float32, copy=True)
+            weight[:] = self.calculate_weight(self.patches[key], temp_weight, key).to(weight.dtype)
+            del temp_weight
         return self.model
 
     def calculate_weight(self, patches, weight, key):
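
Note on the change (not part of the patch): calculate_weight applies one or
more lora deltas to the weight tensor. When the weight is stored in fp16
(about 10 mantissa bits), each intermediate addition rounds its result back to
fp16, so small lora deltas can be partially or entirely lost. Upcasting to
float32 first lets the accumulation happen at higher precision, and only the
single final write into weight[:] rounds down to the storage dtype. The
following standalone sketch demonstrates the effect with synthetic deltas;
apply_patches and its inputs are illustrative stand-ins, not ComfyUI's API.

    # Minimal sketch: accumulate small deltas into an fp16 weight, once in
    # fp16 and once in fp32, then compare both against an fp64 reference.
    import torch

    torch.manual_seed(0)

    weight = torch.randn(1024, dtype=torch.float16)
    # Several small patch deltas, as a chain of lora patches might produce.
    deltas = [torch.randn(1024, dtype=torch.float16) * 1e-3 for _ in range(8)]

    def apply_patches(w, ds, dtype):
        # Accumulate in the given dtype, then round back to the weight's
        # dtype, mirroring the patch: one final cast instead of one
        # round-off per step.
        acc = w.to(dtype)
        for d in ds:
            acc += d.to(dtype)
        return acc.to(w.dtype)

    reference   = apply_patches(weight, deltas, torch.float64)
    fp16_result = apply_patches(weight, deltas, torch.float16)
    fp32_result = apply_patches(weight, deltas, torch.float32)

    print("max error, fp16 accumulation:",
          (fp16_result - reference).abs().max().item())
    print("max error, fp32 accumulation:",
          (fp32_result - reference).abs().max().item())

On a typical run the fp32 path matches the fp64 reference (both round exactly
once, at the end), while the fp16 path accumulates a per-step rounding error.
This is the same trade the patch makes: a temporary fp32 copy per key in
exchange for a single rounding step, with del temp_weight releasing the copy
before the next key is processed.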