Skip to content

Commit

Permalink
Merge branch 'Kosinkadink:main' into main
Browse files Browse the repository at this point in the history
  • Loading branch information
liubo0902 authored Oct 23, 2024
2 parents 53015b1 + 261fac8 commit b73c28b
Show file tree
Hide file tree
Showing 4 changed files with 22 additions and 3 deletions.
20 changes: 19 additions & 1 deletion animatediff/model_injection.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,12 @@ def __init__(self, m: ModelPatcher):
self.motion_injection_params: InjectionParams = InjectionParams()
self.sample_settings: SampleSettings = SampleSettings()
self.motion_models: MotionModelGroup = None
# backwards-compatible calculate_weight
if hasattr(comfy.lora, "calculate_weight"):
self.do_calculate_weight = comfy.lora.calculate_weight
else:
self.do_calculate_weight = self.calculate_weight


def clone(self, hooks_only=False):
cloned = ModelPatcherAndInjector(self)
Expand Down Expand Up @@ -379,7 +385,7 @@ def patch_hooked_weight_to_device(self, lora_hooks: LoraHookGroup, combined_patc

# TODO: handle model_params_lowvram stuff if necessary
temp_weight = comfy.model_management.cast_to_device(weight, weight.device, torch.float32, copy=True)
out_weight = self.calculate_weight(combined_patches[key], temp_weight, key).to(weight.dtype)
out_weight = self.do_calculate_weight(combined_patches[key], temp_weight, key).to(weight.dtype)
if self.lora_hook_mode == LoraHookMode.MAX_SPEED:
self.cached_hooked_patches.setdefault(lora_hooks, {})
self.cached_hooked_patches[lora_hooks][key] = out_weight
Expand Down Expand Up @@ -809,6 +815,7 @@ def load(self, device_to=None, lowvram_model_memory=0, *args, **kwargs):
to_return = super().load(device_to=device_to, lowvram_model_memory=lowvram_model_memory, *args, **kwargs)
if lowvram_model_memory > 0:
self._patch_lowvram_extras(device_to=device_to)
self._handle_float8_pe_tensors()
return to_return

def _patch_lowvram_extras(self, device_to=None):
Expand All @@ -828,6 +835,17 @@ def _patch_lowvram_extras(self, device_to=None):
if device_to is not None:
comfy.utils.set_attr(self.model, key, comfy.utils.get_attr(self.model, key).to(device_to))

def _handle_float8_pe_tensors(self):
    """Upcast positional-encoding ('.pe') tensors stored in float8 to float16.

    Inspects the first '.pe' tensor in the model's state dict; if its dtype is
    not one of the float8 variants, nothing is changed. Otherwise every '.pe'
    tensor is converted in place (via comfy.utils.set_attr) to half precision.
    """
    pe_keys = [name for name in self.model.state_dict().keys() if '.pe' in name]
    if not pe_keys:
        return
    # Dtype is sampled from the first pe tensor only — presumably all pe
    # tensors share a dtype; matches the original early-exit behavior.
    first_dtype = comfy.utils.get_attr(self.model, pe_keys[0]).dtype
    if first_dtype not in (torch.float8_e5m2, torch.float8_e4m3fn):
        return
    for name in pe_keys:
        upcast = comfy.utils.get_attr(self.model, name).half()
        comfy.utils.set_attr(self.model, name, upcast)

def pre_run(self, model: ModelPatcherAndInjector):
self.cleanup()
self.model.set_scale(self.scale_multival, self.per_block_list)
Expand Down
2 changes: 1 addition & 1 deletion animatediff/nodes_scheduling.py
Original file line number Diff line number Diff line change
Expand Up @@ -134,7 +134,7 @@ def create_schedule(self, prompts: str, clip, print_schedule=False, max_length:
options = PromptOptions(interp=tensor_interp, prepend_text=prepend_text, append_text=append_text,
values_replace=values_replace, print_schedule=print_schedule)
conditioning = evaluate_prompt_schedule(prompts, max_length, clip, options)
return (conditioning)
return (conditioning,)


class ValueSchedulingLatentsNode:
Expand Down
1 change: 1 addition & 0 deletions animatediff/scheduling.py
Original file line number Diff line number Diff line change
Expand Up @@ -372,6 +372,7 @@ def handle_prompt_interpolation(pairs: list[InputPair], length: int, clip: CLIP,
prev_holder = CondHolder(idx=i, prompt=real_prompt, raw_prompt=prev_holder.raw_prompt, cond=cond, pooled=pooled, hold=prev_holder.hold)
real_cond[i] = prev_holder.cond
real_pooled[i] = prev_holder.pooled
real_holders[i] = prev_holder
else:
prev_holder = real_holders[i]
pbar.update(1)
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
[project]
name = "comfyui-animatediff-evolved"
description = "Improved AnimateDiff integration for ComfyUI."
version = "1.2.0"
version = "1.2.3"
license = { file = "LICENSE" }
dependencies = []

Expand Down

0 comments on commit b73c28b

Please sign in to comment.