Remove some unused imports.

comfyanonymous 2024-05-27 19:03:56 -04:00
parent 34030fed92
commit 0920e0e5fe
10 changed files with 2 additions and 16 deletions
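
The commit removes imports that are no longer referenced. As context only (not part of the commit), here is a minimal sketch of how such imports can be found with Python's standard ast module; the script name and the example path are illustrative, and a real linter such as pyflakes or ruff (rule F401) performs the same check far more robustly.

import ast
import sys


def unused_imports(source: str):
    """Return (line, name) pairs for imports never referenced by name."""
    tree = ast.parse(source)
    imported = {}  # binding name -> line number where it was imported
    used = set()   # every name referenced anywhere in the module

    for node in ast.walk(tree):
        if isinstance(node, ast.Import):
            for alias in node.names:
                # "import a.b" binds the top-level package name "a", so dotted
                # imports are only flagged when that package is never used at all
                imported[alias.asname or alias.name.split(".")[0]] = node.lineno
        elif isinstance(node, ast.ImportFrom):
            for alias in node.names:
                imported[alias.asname or alias.name] = node.lineno
        elif isinstance(node, ast.Name):
            used.add(node.id)

    return sorted((line, name) for name, line in imported.items() if name not in used)


if __name__ == "__main__":
    # e.g. python find_unused_imports.py comfy/ldm/cascade/stage_b.py
    for path in sys.argv[1:]:
        with open(path, encoding="utf-8") as f:
            for line, name in unused_imports(f.read()):
                print(f"{path}:{line}: '{name}' imported but unused")

This is a heuristic: names used only inside strings or type-comment annotations are not counted, so treat its output as candidates to verify, not as an authoritative list.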

View File

@@ -17,7 +17,6 @@
 """
 
 import math
-import numpy as np
 import torch
 from torch import nn
 from .common import AttnBlock, LayerNorm2d_op, ResBlock, FeedForwardBlock, TimestepBlock

View File

@@ -18,7 +18,6 @@
 
 import torch
 from torch import nn
-import numpy as np
 import math
 from .common import AttnBlock, LayerNorm2d_op, ResBlock, FeedForwardBlock, TimestepBlock
 # from .controlnet import ControlNetDeliverer

View File

@@ -1,6 +1,4 @@
 import torch
-# import pytorch_lightning as pl
-import torch.nn.functional as F
 from contextlib import contextmanager
 from typing import Any, Dict, List, Optional, Tuple, Union
 

View File

@@ -3,7 +3,7 @@ import torch
 import torch.nn.functional as F
 from torch import nn, einsum
 from einops import rearrange, repeat
-from typing import Optional, Any
+from typing import Optional
 import logging
 
 from .diffusionmodules.util import AlphaBlender, timestep_embedding

View File

@@ -2,7 +2,6 @@ import psutil
 import logging
 from enum import Enum
 from comfy.cli_args import args
-import comfy.utils
 import torch
 import sys
 import platform

View File

@@ -14,7 +14,6 @@ import comfy.utils
 from . import clip_vision
 from . import gligen
 from . import diffusers_convert
-from . import model_base
 from . import model_detection
 
 from . import sd1_clip

View File

@@ -1,5 +1,4 @@
 from comfy import sd1_clip
-import torch
 import os
 
 class SD2ClipHModel(sd1_clip.SDClipModel):

View File

@@ -1,10 +1,5 @@
-import math
-
-import torch
-import torch.nn.functional as F
-import comfy.model_management
-
 from kornia.filters import canny
+import comfy.model_management
 
 
 class Canny:

View File

@@ -4,7 +4,6 @@ import torch.nn.functional as F
 import math
 
 from einops import rearrange, repeat
-import os
 from comfy.ldm.modules.attention import optimized_attention
 import comfy.samplers
 

View File

@@ -1,5 +1,4 @@
 import torch
-import nodes
 import comfy.utils
 
 class SD_4XUpscale_Conditioning: