Skip to content

Commit

Permalink
Fix potential memory leak.
Browse files · Browse the repository at this point in the history
  • Branch information
comfyanonymous committed Aug 26, 2024
1 parent 9230f65 commit c681294
Showing 1 changed file with 13 additions and 5 deletions.
18 changes: 13 additions & 5 deletions comfy/model_patcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -319,12 +319,21 @@ def load(self, device_to=None, lowvram_model_memory=0, force_patch_weights=False
mem_counter = 0
patch_counter = 0
lowvram_counter = 0
load_completely = []
loading = []
for n, m in self.model.named_modules():
if hasattr(m, "comfy_cast_weights") or hasattr(m, "weight"):
loading.append((comfy.model_management.module_size(m), n, m))

load_completely = []
loading.sort(reverse=True)
for x in loading:
n = x[1]
m = x[2]
module_mem = x[0]

lowvram_weight = False

if not full_load and hasattr(m, "comfy_cast_weights"):
module_mem = comfy.model_management.module_size(m)
if mem_counter + module_mem >= lowvram_model_memory:
lowvram_weight = True
lowvram_counter += 1
Expand Down Expand Up @@ -356,9 +365,8 @@ def load(self, device_to=None, lowvram_model_memory=0, force_patch_weights=False
wipe_lowvram_weight(m)

if hasattr(m, "weight"):
mem_used = comfy.model_management.module_size(m)
mem_counter += mem_used
load_completely.append((mem_used, n, m))
mem_counter += module_mem
load_completely.append((module_mem, n, m))

load_completely.sort(reverse=True)
for x in load_completely:
Expand Down

0 comments on commit c681294

Please sign in to comment.