From a5c0f597bf2ec67bb0f779925e072bd6be700372 Mon Sep 17 00:00:00 2001
From: Akegarasu
Date: Sat, 30 Dec 2023 14:22:07 +0800
Subject: [PATCH] update

---
 mikazuki/utils/devices.py     |  3 ++-
 mikazuki/utils/train_utils.py | 12 ++++++++++++
 2 files changed, 14 insertions(+), 1 deletion(-)

diff --git a/mikazuki/utils/devices.py b/mikazuki/utils/devices.py
index e922b75..f327c2a 100644
--- a/mikazuki/utils/devices.py
+++ b/mikazuki/utils/devices.py
@@ -20,6 +20,7 @@ def check_torch_gpu():
                 printable_devices.append(f"GPU {pos}: {torch.cuda.get_device_name(device)} ({round(torch.cuda.get_device_properties(device).total_memory / 1024 / 1024 / 1024)} GB)")
                 log.info(f'Torch detected GPU: {torch.cuda.get_device_name(device)} VRAM {round(torch.cuda.get_device_properties(device).total_memory / 1024 / 1024)} Arch {torch.cuda.get_device_capability(device)} Cores {torch.cuda.get_device_properties(device).multi_processor_count}')
         else:
-            log.warn("Torch is not able to use GPU, please check your torch installation.\n Use --skip-prepare-environment to disable this check")
+            log.error("Torch is not able to use GPU, please check your torch installation.\n Use --skip-prepare-environment to disable this check")
+            log.error("!!!Torch 无法使用 GPU,您无法正常开始训练!!!\n您的显卡可能并不支持,或是 torch 安装有误。请检查您的 torch 安装。\n 使用 --skip-prepare-environment 可以跳过此检查")
     except Exception as e:
         log.error(f'Could not load torch: {e}')
diff --git a/mikazuki/utils/train_utils.py b/mikazuki/utils/train_utils.py
index 750514e..9da3dd6 100644
--- a/mikazuki/utils/train_utils.py
+++ b/mikazuki/utils/train_utils.py
@@ -18,6 +18,18 @@ def is_promopt_like(s):


 def validate_model(model_name: str):
     if os.path.exists(model_name):
+        try:
+            with open(model_name, "rb") as f:
+                content = f.read(1024 * 200)
+                if b"model.diffusion_model" in content or b"cond_stage_model.transformer.text_model" in content:
+                    return True, "ok"
+
+                if b"lora_unet" in content or b"lora_te" in content:
+                    return False, "pretrained model is not a Stable Diffusion checkpoint / 校验失败:底模不是 Stable Diffusion 模型"
+        except Exception as e:
+            log.warn(f"model file {model_name} can't open: {e}")
+            return True, ""
+
     return True, "ok"  # huggerface model repo