Runtime error

Exit code: 1. Reason:

  [traceback truncated] .../transformers/models/auto/tokenization_auto.py", line 834, in from_pretrained
    tokenizer_config = get_tokenizer_config(pretrained_model_name_or_path, **kwargs)
  File "/usr/local/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 666, in get_tokenizer_config
    resolved_config_file = cached_file(
  File "/usr/local/lib/python3.10/site-packages/transformers/utils/hub.py", line 402, in cached_file
    resolved_file = hf_hub_download(
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py", line 114, in _inner_fn
    return fn(*args, **kwargs)
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/file_download.py", line 972, in hf_hub_download
    hf_headers = build_hf_headers(
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py", line 101, in inner_f
    return f(*args, **kwargs)
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py", line 114, in _inner_fn
    return fn(*args, **kwargs)
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/utils/_headers.py", line 126, in build_hf_headers
    token_to_send = get_token_to_send(token)
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/utils/_headers.py", line 154, in get_token_to_send
    cached_token = get_token()
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/utils/_auth.py", line 49, in get_token
    return _get_token_from_google_colab() or _get_token_from_environment() or _get_token_from_file()
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/utils/_auth.py", line 123, in _get_token_from_file
    return _clean_token(Path(constants.HF_TOKEN_PATH).read_text())
  File "/usr/local/lib/python3.10/pathlib.py", line 1134, in read_text
    with self.open(mode='r', encoding=encoding, errors=errors) as f:
  File "/usr/local/lib/python3.10/pathlib.py", line 1119, in open
    return self._accessor.open(self, mode, buffering, encoding, errors,
NotADirectoryError: [Errno 20] Not a directory: '/dev/null/token'

Container logs:

Fetching error logs...