diff --git a/llmfoundry/callbacks/hf_checkpointer.py b/llmfoundry/callbacks/hf_checkpointer.py
index f24c9e28b1..4365a5b2e5 100644
--- a/llmfoundry/callbacks/hf_checkpointer.py
+++ b/llmfoundry/callbacks/hf_checkpointer.py
@@ -588,7 +588,7 @@ def tensor_hook(
                 del new_base_model_instance
             else:
                 new_model_instance = type(original_model)(new_config)
-                if new_model_instance.generation_config != None:
+                if new_model_instance.generation_config is not None:
                     new_model_instance.generation_config.update(
                         **original_model.generation_config.to_dict(),
                     )
diff --git a/tests/a_scripts/inference/test_convert_composer_to_hf.py b/tests/a_scripts/inference/test_convert_composer_to_hf.py
index 5fc9ad4769..4abee0113c 100644
--- a/tests/a_scripts/inference/test_convert_composer_to_hf.py
+++ b/tests/a_scripts/inference/test_convert_composer_to_hf.py
@@ -1675,12 +1675,9 @@ def __init__(self, config: PretrainedConfig):
         save_interval='1ba',
     )
 
-    try:
-        checkpointer._save_checkpoint(
-            state=state,
-            logger=logger,
-            upload_to_save_folder=False,
-            register_to_mlflow=False,
-        )
-    except Exception as e:
-        print(f'Test failed: {e} when generation_config is {generation_config}')
+    checkpointer._save_checkpoint(
+        state=state,
+        logger=logger,
+        upload_to_save_folder=False,
+        register_to_mlflow=False,
+    )
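
For context, a minimal standalone sketch (not part of the patch; it assumes only that transformers is installed, and the helper name is hypothetical) of the behavior the first hunk fixes: `!= None` dispatches to the object's `__eq__`/`__ne__`, which a config class could override, while `is not None` is a plain identity check that also correctly handles model classes whose `generation_config` is unset.

# Hypothetical sketch, not llm-foundry code: mirrors the fixed guard.
from typing import Optional
from transformers import GenerationConfig

def merge_generation_config(
    new: Optional[GenerationConfig],
    original: GenerationConfig,
) -> None:
    # Identity check rather than `!= None`: never calls a custom __eq__,
    # and safely skips models that have no generation_config at all.
    if new is not None:
        # GenerationConfig.update merges the original settings into `new`.
        new.update(**original.to_dict())

merge_generation_config(GenerationConfig(), GenerationConfig(do_sample=True, temperature=0.8))
merge_generation_config(None, GenerationConfig())  # no-op, no AttributeError

The second hunk removes the try/except in the test so that a failure in `_save_checkpoint` propagates and fails the test directly, instead of being swallowed and printed.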