chore: fix typos, remove debug print
kohya-ss committed Aug 22, 2024
1 parent b0a9808 commit bf9f798
Showing 1 changed file with 6 additions and 6 deletions.
12 changes: 6 additions & 6 deletions networks/flux_extract_lora.py
@@ -68,10 +68,10 @@ def str_to_dtype(p):
     logger.info("Using memory efficient safe_open")
     open_fn = lambda fn: MemoryEfficientSafeOpen(fn)

-    with open_fn(model_org) as fo:
+    with open_fn(model_org) as f_org:
         # filter keys
         keys = []
-        for key in fo.keys():
+        for key in f_org.keys():
             if not ("single_block" in key or "double_block" in key):
                 continue
             if ".bias" in key:
@@ -80,11 +80,11 @@ def str_to_dtype(p):
                 continue
             keys.append(key)

-        with open_fn(model_tuned) as ft:
+        with open_fn(model_tuned) as f_tuned:
             for key in tqdm(keys):
                 # get tensors and calculate difference
-                value_o = fo.get_tensor(key)
-                value_t = ft.get_tensor(key)
+                value_o = f_org.get_tensor(key)
+                value_t = f_tuned.get_tensor(key)
                 mat = value_t.to(calc_dtype) - value_o.to(calc_dtype)
                 del value_o, value_t

@@ -114,7 +114,7 @@ def str_to_dtype(p):
                 U = U.to(store_device, dtype=save_dtype).contiguous()
                 Vh = Vh.to(store_device, dtype=save_dtype).contiguous()

-                print(f"key: {key}, U: {U.size()}, Vh: {Vh.size()}")
+                # print(f"key: {key}, U: {U.size()}, Vh: {Vh.size()}")
                 lora_weights[key] = (U, Vh)
                 del mat, U, S, Vh
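For readers skimming the diff: the renamed loop implements the usual "weight delta plus truncated SVD" style of LoRA extraction. Below is a minimal sketch of that idea, not the repository's actual script. It opens the two checkpoints with safetensors' safe_open (rather than the script's MemoryEfficientSafeOpen helper), subtracts the original weight from the tuned one, and keeps only the top singular directions. The rank, dtypes, and file names are illustrative assumptions.

import torch
from safetensors import safe_open

def extract_lora_pair(value_org, value_tuned, rank=16, calc_dtype=torch.float32):
    # Delta between the tuned and original 2D weight matrices.
    mat = value_tuned.to(calc_dtype) - value_org.to(calc_dtype)
    out_dim, in_dim = mat.shape
    rank = min(rank, in_dim, out_dim)

    # Truncated SVD: keep only the top-`rank` singular directions of the delta.
    U, S, Vh = torch.linalg.svd(mat, full_matrices=False)
    U, S, Vh = U[:, :rank], S[:rank], Vh[:rank, :]

    # Fold the singular values into U so that U @ Vh approximates the delta.
    U = U * S.unsqueeze(0)
    return U.contiguous(), Vh.contiguous()

# Usage sketch: collect low-rank deltas for the 2D transformer-block weights,
# using the same key filter the diff shows ("single_block"/"double_block",
# skipping biases and norms). File paths here are placeholders.
lora_weights = {}
with safe_open("flux_org.safetensors", framework="pt") as f_org, \
     safe_open("flux_tuned.safetensors", framework="pt") as f_tuned:
    for key in f_org.keys():
        if not ("single_block" in key or "double_block" in key):
            continue
        if ".bias" in key or "norm" in key:
            continue
        value_o = f_org.get_tensor(key)
        if value_o.dim() != 2:
            continue
        lora_weights[key] = extract_lora_pair(value_o, f_tuned.get_tensor(key))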
