Add backend option to TorchCompileModel.

If you want to use the cudagraphs backend you need to launch ComfyUI with --disable-cuda-malloc.
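
For example (a sketch, assuming the standard main.py entry point):

    python main.py --disable-cuda-malloc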

If you get other backends working, feel free to make a PR to add them.
comfyanonymous 2024-09-27 02:12:37 -04:00
parent d72e871cfa
commit 83b01f960a
1 changed file with 3 additions and 2 deletions


@@ -4,6 +4,7 @@ class TorchCompileModel:
     @classmethod
     def INPUT_TYPES(s):
         return {"required": { "model": ("MODEL",),
+                              "backend": (["inductor", "cudagraphs"],),
                               }}
     RETURN_TYPES = ("MODEL",)
     FUNCTION = "patch"
@@ -11,9 +12,9 @@ class TorchCompileModel:
     CATEGORY = "_for_testing"
     EXPERIMENTAL = True
 
-    def patch(self, model):
+    def patch(self, model, backend):
         m = model.clone()
-        m.add_object_patch("diffusion_model", torch.compile(model=m.get_model_object("diffusion_model")))
+        m.add_object_patch("diffusion_model", torch.compile(model=m.get_model_object("diffusion_model"), backend=backend))
         return (m, )
 
 NODE_CLASS_MAPPINGS = {
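
For reference, a minimal standalone sketch (not part of this commit, assumes PyTorch 2.x) of what the node now does: forwarding the selected backend name to torch.compile. The toy module and shapes below are illustrative only.

    import torch
    import torch.nn as nn

    # Toy module standing in for the diffusion model (illustrative only).
    net = nn.Sequential(nn.Linear(16, 16), nn.ReLU(), nn.Linear(16, 16))

    # "inductor" is torch.compile's default backend; "cudagraphs" needs a CUDA
    # device and, in ComfyUI, launching with --disable-cuda-malloc.
    backend = "cudagraphs" if torch.cuda.is_available() else "inductor"
    if torch.cuda.is_available():
        net = net.cuda()

    compiled = torch.compile(net, backend=backend)
    out = compiled(torch.randn(4, 16, device="cuda" if torch.cuda.is_available() else "cpu"))

The node does the equivalent through m.add_object_patch, swapping the model's diffusion_model object for the compiled version.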