Is this node not compatible with FP8?
ERROR:root:!!! Exception during processing !!!
ERROR:root:Traceback (most recent call last):
File "C:\Product\ComfyUI\ComfyUI_windows_portable\ComfyUI\execution.py", line 153, in recursive_execute
output_data, output_ui = get_output_data(obj, input_data_all)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Product\ComfyUI\ComfyUI_windows_portable\ComfyUI\execution.py", line 83, in get_output_data
return_values = map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Product\ComfyUI\ComfyUI_windows_portable\ComfyUI\execution.py", line 76, in map_node_over_list
results.append(getattr(obj, func)(**slice_dict(input_data_all, i)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Product\ComfyUI\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI_UltimateSDUpscale\nodes.py", line 124, in upscale
processed = script.run(p=sdprocessing, _=None, tile_width=tile_width, tile_height=tile_height,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Product\ComfyUI\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI_UltimateSDUpscale\repositories\ultimate_sd_upscale\scripts\ultimate-upscale.py", line 553, in run
upscaler.process()
File "C:\Product\ComfyUI\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI_UltimateSDUpscale\repositories\ultimate_sd_upscale\scripts\ultimate-upscale.py", line 136, in process
self.image = self.redraw.start(self.p, self.image, self.rows, self.cols)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Product\ComfyUI\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI_UltimateSDUpscale\repositories\ultimate_sd_upscale\scripts\ultimate-upscale.py", line 243, in start
return self.linear_process(p, image, rows, cols)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Product\ComfyUI\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI_UltimateSDUpscale\repositories\ultimate_sd_upscale\scripts\ultimate-upscale.py", line 178, in linear_process
processed = processing.process_images(p)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Product\ComfyUI\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI_UltimateSDUpscale\modules\processing.py", line 119, in process_images
(samples,) = common_ksampler(p.model, p.seed, p.steps, p.cfg, p.sampler_name,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Product\ComfyUI\ComfyUI_windows_portable\ComfyUI\nodes.py", line 1269, in common_ksampler
samples = comfy.sample.sample(model, noise, steps, cfg, sampler_name, scheduler, positive, negative, latent_image,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Product\ComfyUI\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI-Impact-Pack\modules\impact\sample_error_enhancer.py", line 9, in informative_sample
return original_sample(*args, **kwargs) # This code helps interpret error messages that occur within exceptions but does not have any impact on other operations.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Product\ComfyUI\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI-AnimateDiff-Evolved\animatediff\sampling.py", line 242, in motion_sample
return orig_comfy_sample(model, noise, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Product\ComfyUI\ComfyUI_windows_portable\ComfyUI\comfy\sample.py", line 93, in sample
real_model, positive_copy, negative_copy, noise_mask, models = prepare_sampling(model, noise.shape, positive, negative, noise_mask)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Product\ComfyUI\ComfyUI_windows_portable\ComfyUI\comfy\sample.py", line 86, in prepare_sampling
comfy.model_management.load_models_gpu([model] + models, model.memory_required([noise_shape[0] * 2] + list(noise_shape[1:])) + inference_memory)
File "C:\Product\ComfyUI\ComfyUI_windows_portable\ComfyUI\comfy\model_management.py", line 410, in load_models_gpu
cur_loaded_model = loaded_model.model_load(lowvram_model_memory)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Product\ComfyUI\ComfyUI_windows_portable\ComfyUI\comfy\model_management.py", line 297, in model_load
device_map = accelerate.infer_auto_device_map(self.real_model, max_memory={0: "{}MiB".format(lowvram_model_memory // (1024 * 1024)), "cpu": "16GiB"})
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Product\ComfyUI\ComfyUI_windows_portable\python_embeded\Lib\site-packages\accelerate\utils\modeling.py", line 978, in infer_auto_device_map
module_sizes = compute_module_sizes(model, dtype=dtype, special_dtypes=special_dtypes)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Product\ComfyUI\ComfyUI_windows_portable\python_embeded\Lib\site-packages\accelerate\utils\modeling.py", line 616, in compute_module_sizes
size = tensor.numel() * dtype_byte_size(tensor.dtype)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Product\ComfyUI\ComfyUI_windows_portable\python_embeded\Lib\site-packages\accelerate\utils\modeling.py", line 115, in dtype_byte_size
raise ValueError(f"dtype is not a valid dtype: {dtype}.")
ValueError: dtype is not a valid dtype: torch.float8_e4m3fn.
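For context on the error itself: the ValueError comes from accelerate's dtype_byte_size, which (in the accelerate version used here) appears to work out a tensor's byte size by parsing the trailing digits out of the dtype's string name. str(torch.float8_e4m3fn) ends in "fn" rather than a bit count, so that parse fails. Below is a minimal sketch of that name-parsing approach, not accelerate's actual code, just to show why the FP8 dtype trips it; as far as I can tell, newer accelerate releases add an explicit case for the float8 dtypes.

```python
import re
import torch

def dtype_byte_size_sketch(dtype: torch.dtype) -> float:
    # Sketch (hypothetical helper): read the digits at the end of
    # str(dtype) as the bit width and convert to bytes.
    if dtype == torch.bool:
        return 1 / 8
    # str(torch.float16) == "torch.float16"        -> matches "16"
    # str(torch.float8_e4m3fn) == "torch.float8_e4m3fn" -> no trailing
    # digits, so the search returns None and the error below fires,
    # matching the ValueError in the traceback above.
    bit_search = re.search(r"[^\d](\d+)$", str(dtype))
    if bit_search is None:
        raise ValueError(f"dtype is not a valid dtype: {dtype}.")
    return int(bit_search.group(1)) / 8

print(dtype_byte_size_sketch(torch.float16))  # 2.0
# dtype_byte_size_sketch(torch.float8_e4m3fn)  # raises ValueError, as above
```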
And it works fine if you remove this node?
It works when this node is not used.
Does it happen when you use a KSampler normally? This node just uses KSamplers behind the scenes.