Skip to content

Commit

Permalink
Add missing network argument for split_qkv
Browse files · Browse the repository at this point in the history
  • Loading branch information
bmaltais committed Aug 24, 2024
1 parent d2e0917 commit f066bfd
Showing 1 changed file with 7 additions and 0 deletions.
7 changes: 7 additions & 0 deletions kohya_gui/lora_gui.py
Original file line number Diff line number Diff line change
Expand Up @@ -258,6 +258,7 @@ def save_configuration(
guidance_scale,
mem_eff_save,
apply_t5_attn_mask,
split_qkv,
):
# Get list of function parameters and values
parameters = list(locals().items())
Expand Down Expand Up @@ -483,6 +484,7 @@ def open_configuration(
guidance_scale,
mem_eff_save,
apply_t5_attn_mask,
split_qkv,
training_preset,
):
# Get list of function parameters and their values
Expand Down Expand Up @@ -739,6 +741,7 @@ def train_model(
guidance_scale,
mem_eff_save,
apply_t5_attn_mask,
split_qkv,
):
# Get list of function parameters and values
parameters = list(locals().items())
Expand Down Expand Up @@ -1085,6 +1088,9 @@ def train_model(
f"train_blocks is currently set to '{train_blocks}'. split_mode is enabled, forcing train_blocks to 'single'."
)
kohya_lora_vars["train_blocks"] = "single"
if split_qkv:
kohya_lora_vars["split_qkv"] = True

for key, value in kohya_lora_vars.items():
if value:
network_args += f" {key}={value}"
Expand Down Expand Up @@ -2553,6 +2559,7 @@ def update_LoRA_settings(
flux1_training.guidance_scale,
flux1_training.mem_eff_save,
flux1_training.apply_t5_attn_mask,
flux1_training.split_qkv,
]

configuration.button_open_config.click(
Expand Down

0 comments on commit f066bfd

Please sign in to comment.