diff --git a/src/llmcompressor/pytorch/model_load/helpers.py b/src/llmcompressor/pytorch/model_load/helpers.py index b4390afc72..80aeeea7b8 100644 --- a/src/llmcompressor/pytorch/model_load/helpers.py +++ b/src/llmcompressor/pytorch/model_load/helpers.py @@ -144,7 +144,7 @@ def load_safetensors_state_dict(file_path: str) -> Dict[str, torch.Tensor]: def copy_python_files_from_model_cache(model, save_path: str): config = model.config cache_path = None - if hasattr(config, "_name_or_path"): + if hasattr(config, "_name_or_path") and len(config._name_or_path.strip()) > 0: import os import shutil diff --git a/src/llmcompressor/transformers/utils/helpers.py b/src/llmcompressor/transformers/utils/helpers.py index 1834c19b00..9a3b9059d3 100644 --- a/src/llmcompressor/transformers/utils/helpers.py +++ b/src/llmcompressor/transformers/utils/helpers.py @@ -57,7 +57,12 @@ def infer_recipe_from_model_path(model_path: Union[str, Path]) -> Optional[str]: - Hugging face model ID :return: The path to the recipe file if found, None otherwise. """ - model_path = model_path.as_posix() if isinstance(model_path, Path) else model_path + model_path = ( + model_path.as_posix() if isinstance(model_path, Path) else model_path.strip() + ) + if model_path == "": + logger.debug("got empty path_or_name; unable to find recipe") + return None if os.path.isdir(model_path) or os.path.isfile(model_path): # Model path is a local path to the model directory or file