1 file changed: +16 −9 lines changed

@@ -414,16 +414,23 @@ def from_pretrained(cls,
         # Use file lock to prevent race conditions when multiple processes
         # try to import/cache the same remote model config file
         with config_file_lock():
-            pretrained_config = transformers.AutoConfig.from_pretrained(
-                checkpoint_dir,
-                trust_remote_code=trust_remote_code,
-            )
+            # When model_format is TLLM_ENGINE, checkpoint_dir may be None;
+            # guard against sending cyclic requests to a None URL.
+            if checkpoint_dir is not None:
+                pretrained_config = transformers.AutoConfig.from_pretrained(
+                    checkpoint_dir,
+                    trust_remote_code=trust_remote_code,
+                )

-            # Find the cache path by looking for the config.json file which should be in all
-            # huggingface models
-            model_dir = Path(
-                transformers.utils.hub.cached_file(checkpoint_dir,
-                                                   'config.json')).parent
+                # Find the cache path by looking for the config.json file which should be in all
+                # huggingface models
+                model_dir = Path(
+                    transformers.utils.hub.cached_file(checkpoint_dir,
+                                                       'config.json')).parent
+            else:
+                raise ValueError(
+                    "checkpoint_dir is None. Cannot load model config without a valid checkpoint directory."
+                )

         quant_config = QuantConfig()
         layer_quant_config = None
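For reference, the guarded flow introduced by this diff reduces to roughly the following sketch. The names `config_file_lock` and `load_pretrained_config` are illustrative stand-ins, and the lock implementation is an assumption (built here on the `filelock` package; the real helper may differ):

# A minimal sketch of the patched logic above; `config_file_lock` and
# `load_pretrained_config` are illustrative stand-ins, not the project's API.
from pathlib import Path

import filelock
import transformers


def config_file_lock(lock_path="/tmp/tllm_config.lock"):
    # Assumed implementation: serialize config download/caching across
    # processes so concurrent workers do not race on the shared HF cache.
    return filelock.FileLock(lock_path)


def load_pretrained_config(checkpoint_dir, trust_remote_code=False):
    with config_file_lock():
        # With model_format == TLLM_ENGINE, checkpoint_dir may be None;
        # fail fast instead of letting AutoConfig hit a None URL.
        if checkpoint_dir is None:
            raise ValueError(
                "checkpoint_dir is None. Cannot load model config without "
                "a valid checkpoint directory.")
        pretrained_config = transformers.AutoConfig.from_pretrained(
            checkpoint_dir, trust_remote_code=trust_remote_code)
        # Every Hugging Face model ships a config.json, so its cached
        # location identifies the snapshot directory.
        model_dir = Path(
            transformers.utils.hub.cached_file(checkpoint_dir,
                                               'config.json')).parent
    return pretrained_config, model_dir

Note that the sketch inverts the patch's `if/else` into an early raise; the behavior is equivalent, since the None case can never fall through to the config lookup.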