Add a node to set the model compute dtype for debugging.
comfyanonymous committed Feb 15, 2025
1 parent 1cd6cd6 commit 2e21122
Showing 2 changed files with 30 additions and 0 deletions.
21 changes: 21 additions & 0 deletions comfy_extras/nodes_model_advanced.py
@@ -3,6 +3,8 @@
import comfy.latent_formats
import nodes
import torch
import node_helpers


class LCM(comfy.model_sampling.EPS):
    def calculate_denoised(self, sigma, model_output, model_input):
@@ -294,6 +296,24 @@ def rescale_cfg(args):
        m.set_model_sampler_cfg_function(rescale_cfg)
        return (m, )

class ModelComputeDtype:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": { "model": ("MODEL",),
                              "dtype": (["default", "fp32", "fp16", "bf16"],),
                              }}

    RETURN_TYPES = ("MODEL",)
    FUNCTION = "patch"

    CATEGORY = "advanced/debug/model"

    def patch(self, model, dtype):
        m = model.clone()
        m.set_model_compute_dtype(node_helpers.string_to_torch_dtype(dtype))
        return (m, )


NODE_CLASS_MAPPINGS = {
"ModelSamplingDiscrete": ModelSamplingDiscrete,
"ModelSamplingContinuousEDM": ModelSamplingContinuousEDM,
Expand All @@ -303,4 +323,5 @@ def rescale_cfg(args):
"ModelSamplingAuraFlow": ModelSamplingAuraFlow,
"ModelSamplingFlux": ModelSamplingFlux,
"RescaleCFG": RescaleCFG,
"ModelComputeDtype": ModelComputeDtype,
}
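
For context, a hedged sketch (not part of the commit) of how the new node could be driven directly from Python. The `model` object here is an assumption: any ComfyUI MODEL (for example, the output of a checkpoint loader node) that exposes the clone() and set_model_compute_dtype() methods used above.

    # Hypothetical usage sketch -- assumes `model` is a loaded ComfyUI MODEL object.
    node = ModelComputeDtype()
    (debug_model,) = node.patch(model, "fp16")   # clone the model, force fp16 compute
    # patch() operates on a clone, so the original `model` keeps its compute dtype;
    # "default" maps to None in string_to_torch_dtype (see node_helpers.py below).
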
9 changes: 9 additions & 0 deletions node_helpers.py
@@ -1,4 +1,5 @@
import hashlib
import torch

from comfy.cli_args import args

@@ -35,3 +36,11 @@ def hasher():
"sha512": hashlib.sha512
}
return hashfuncs[args.default_hashing_function]

def string_to_torch_dtype(string):
if string == "fp32":
return torch.float32
if string == "fp16":
return torch.float16
if string == "bf16":
return torch.bfloat16

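As a quick sanity check of the helper's mapping, the sketch below re-declares string_to_torch_dtype locally so it runs standalone; it only requires torch to be installed.

    import torch

    def string_to_torch_dtype(string):
        if string == "fp32":
            return torch.float32
        if string == "fp16":
            return torch.float16
        if string == "bf16":
            return torch.bfloat16

    assert string_to_torch_dtype("fp16") is torch.float16
    assert string_to_torch_dtype("bf16") is torch.bfloat16
    assert string_to_torch_dtype("default") is None   # unmatched strings fall through to None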