ComfyUI_fabric
Error when Feedback Percentage is 85 or higher and a pos/neg latent is connected
The error below occurs whenever Feedback Percentage is set to 85 or above and a positive or negative latent is plugged into its slot.
Error occurred when executing KSamplerFABRIC:
'SD15' object has no attribute 'beta_schedule'
File "/home/poisenbery/AI_Stuff/ComfyUI/execution.py", line 153, in recursive_execute
output_data, output_ui = get_output_data(obj, input_data_all)
File "/home/poisenbery/AI_Stuff/ComfyUI/execution.py", line 83, in get_output_data
return_values = map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True)
File "/home/poisenbery/AI_Stuff/ComfyUI/execution.py", line 76, in map_node_over_list
results.append(getattr(obj, func)(**slice_dict(input_data_all, i)))
File "/home/poisenbery/AI_Stuff/ComfyUI/custom_nodes/ComfyUI_fabric/nodes.py", line 181, in sample
return KSamplerFABRICAdv().sample(*args, **kwargs)
File "/home/poisenbery/AI_Stuff/ComfyUI/custom_nodes/ComfyUI_fabric/nodes.py", line 138, in sample
return fabric_sample(*args, **kwargs)
File "/home/poisenbery/AI_Stuff/ComfyUI/custom_nodes/ComfyUI_fabric/fabric/fabric.py", line 52, in fabric_sample
samples = KSamplerAdvanced().sample(model_patched, add_noise, noise_seed, steps, cfg, sampler_name, scheduler, positive,
File "/home/poisenbery/AI_Stuff/ComfyUI/nodes.py", line 1333, in sample
return common_ksampler(model, noise_seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise=denoise, disable_noise=disable_noise, start_step=start_at_step, last_step=end_at_step, force_full_denoise=force_full_denoise)
File "/home/poisenbery/AI_Stuff/ComfyUI/nodes.py", line 1269, in common_ksampler
samples = comfy.sample.sample(model, noise, steps, cfg, sampler_name, scheduler, positive, negative, latent_image,
File "/home/poisenbery/AI_Stuff/ComfyUI/custom_nodes/ComfyUI-AnimateDiff-Evolved/animatediff/sampling.py", line 178, in animatediff_sample
return orig_comfy_sample(model, noise, *args, **kwargs)
File "/home/poisenbery/AI_Stuff/ComfyUI/custom_nodes/ComfyUI-Impact-Pack/modules/impact/sample_error_enhancer.py", line 9, in informative_sample
return original_sample(*args, **kwargs) # This code helps interpret error messages that occur within exceptions but does not have any impact on other operations.
File "/home/poisenbery/AI_Stuff/ComfyUI/comfy/sample.py", line 100, in sample
samples = sampler.sample(noise, positive_copy, negative_copy, cfg=cfg, latent_image=latent_image, start_step=start_step, last_step=last_step, force_full_denoise=force_full_denoise, denoise_mask=noise_mask, sigmas=sigmas, callback=callback, disable_pbar=disable_pbar, seed=seed)
File "/home/poisenbery/AI_Stuff/ComfyUI/comfy/samplers.py", line 711, in sample
return sample(self.model, noise, positive, negative, cfg, self.device, sampler, sigmas, self.model_options, latent_image=latent_image, denoise_mask=denoise_mask, callback=callback, disable_pbar=disable_pbar, seed=seed)
File "/home/poisenbery/AI_Stuff/ComfyUI/comfy/samplers.py", line 617, in sample
samples = sampler.sample(model_wrap, sigmas, extra_args, callback, noise, latent_image, denoise_mask, disable_pbar)
File "/home/poisenbery/AI_Stuff/ComfyUI/comfy/samplers.py", line 556, in sample
samples = self.sampler_function(model_k, noise, sigmas, extra_args=extra_args, callback=k_callback, disable=disable_pbar, **self.extra_options)
File "/home/poisenbery/AI_Stuff/ComfyUI/venv/lib/python3.10/site-packages/torch/utils/_contextlib.py", line 115, in decorate_context
return func(*args, **kwargs)
File "/home/poisenbery/AI_Stuff/ComfyUI/comfy/k_diffusion/sampling.py", line 580, in sample_dpmpp_2m
denoised = model(x, sigmas[i] * s_in, **extra_args)
File "/home/poisenbery/AI_Stuff/ComfyUI/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1501, in _call_impl
return forward_call(*args, **kwargs)
File "/home/poisenbery/AI_Stuff/ComfyUI/comfy/samplers.py", line 277, in forward
out = self.inner_model(x, sigma, cond=cond, uncond=uncond, cond_scale=cond_scale, model_options=model_options, seed=seed)
File "/home/poisenbery/AI_Stuff/ComfyUI/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1501, in _call_impl
return forward_call(*args, **kwargs)
File "/home/poisenbery/AI_Stuff/ComfyUI/comfy/samplers.py", line 267, in forward
return self.apply_model(*args, **kwargs)
File "/home/poisenbery/AI_Stuff/ComfyUI/comfy/samplers.py", line 264, in apply_model
out = sampling_function(self.inner_model, x, timestep, uncond, cond, cond_scale, model_options=model_options, seed=seed)
File "/home/poisenbery/AI_Stuff/ComfyUI/comfy/samplers.py", line 252, in sampling_function
cond, uncond = calc_cond_uncond_batch(model, cond, uncond, x, timestep, model_options)
File "/home/poisenbery/AI_Stuff/ComfyUI/comfy/samplers.py", line 228, in calc_cond_uncond_batch
output = model_options['model_function_wrapper'](model.apply_model, {"input": input_x, "timestep": timestep_, "c": c, "cond_or_uncond": cond_or_uncond}).chunk(batch_chunks)
File "/home/poisenbery/AI_Stuff/ComfyUI/custom_nodes/ComfyUI_fabric/fabric/fabric.py", line 244, in unet_wrapper
neg_zs = noise_latents(model_patched, neg_lats, current_ts, ts_interval)
File "/home/poisenbery/AI_Stuff/ComfyUI/custom_nodes/ComfyUI_fabric/fabric/fabric.py", line 348, in noise_latents
z_ref = q_sample(model, latent.unsqueeze(0), ts, ts_interval)
File "/home/poisenbery/AI_Stuff/ComfyUI/custom_nodes/ComfyUI_fabric/fabric/unet.py", line 19, in q_sample
beta_schedule = config.beta_schedule
This should be fixed now.
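
For anyone who hits the same AttributeError on another model config, one defensive pattern is to read the schedule name with a fallback instead of accessing the attribute directly, as the traceback shows fabric/unet.py doing on a config object ('SD15') that no longer defines beta_schedule. The sketch below only illustrates that idea; the helper name get_beta_schedule, the "linear" default, and the stand-in config class are assumptions for the example, not necessarily the fix that was applied to this repository.

def get_beta_schedule(model_config, default: str = "linear") -> str:
    """Return the config's beta schedule name, falling back to a default
    when the config object (like the SD15 config in the traceback above)
    does not define a `beta_schedule` attribute."""
    return getattr(model_config, "beta_schedule", default)

class FakeSD15Config:
    """Stand-in for a model config without `beta_schedule`, mirroring the error."""
    pass

# Direct attribute access would raise AttributeError here; the helper degrades
# gracefully to the default schedule name instead.
print(get_beta_schedule(FakeSD15Config()))  # -> "linear"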