Runtime error

ns/3.10.13/lib/python3.10/site-packages/huggingface_hub/file_download.py", line 1363, in hf_hub_download
    raise LocalEntryNotFoundError(
huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on.

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/home/user/app/app.py", line 230, in <module>
    main()
  File "/home/user/app/app.py", line 120, in main
    model = AutoModel.get_model(
  File "/home/user/app/lmflow/models/auto_model.py", line 16, in get_model
    return HFDecoderModel(model_args, *args, **kwargs)
  File "/home/user/app/lmflow/models/hf_decoder_model.py", line 220, in __init__
    self.backend_model = AutoModelForCausalLM.from_pretrained(
  File "/home/user/.pyenv/versions/3.10.13/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py", line 471, in from_pretrained
    return model_class.from_pretrained(
  File "/home/user/.pyenv/versions/3.10.13/lib/python3.10/site-packages/transformers/modeling_utils.py", line 2458, in from_pretrained
    resolved_archive_file, sharded_metadata = get_checkpoint_shard_files(
  File "/home/user/.pyenv/versions/3.10.13/lib/python3.10/site-packages/transformers/utils/hub.py", line 925, in get_checkpoint_shard_files
    cached_filename = cached_file(
  File "/home/user/.pyenv/versions/3.10.13/lib/python3.10/site-packages/transformers/utils/hub.py", line 443, in cached_file
    raise EnvironmentError(
OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like LMFlow/Full-Robin-7b-v2 is not the path to a directory containing a file named pytorch_model-00002-of-00002.bin. Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'.
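The OSError points at the offline-mode docs: from_pretrained() fails because the missing shard can neither be downloaded nor found in the local cache. One way to avoid this at startup is to pre-cache the weights while network access is available. A minimal sketch, assuming the environment can reach the Hub at build time; the repo id comes from the traceback above, everything else is illustrative:

```python
# Minimal sketch: pre-cache all files of the model repo so that
# AutoModelForCausalLM.from_pretrained() can resolve the sharded weights
# (e.g. pytorch_model-00002-of-00002.bin) from the local cache even if
# the Hub is unreachable when the app starts.
from huggingface_hub import snapshot_download

# Repo id taken from the traceback; this downloads into the standard
# Hugging Face cache directory and returns the local snapshot path.
local_dir = snapshot_download(repo_id="LMFlow/Full-Robin-7b-v2")
print(f"Model files cached under: {local_dir}")
```

With the files cached, setting the environment variable HF_HUB_OFFLINE=1 (or TRANSFORMERS_OFFLINE=1) makes the libraries skip the connection attempt entirely, which is what the linked offline-mode documentation describes.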
