Does anyone have difficulties using it on Colab?
OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like meta-llama/Meta-Llama-3-8B-Instruct is not the path to a directory containing a file named config.json.
Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'.
I keep getting this error even though my token seems to be correct. I am wondering why?
Much appreciated
Facing the same issue. Did you find a solution?
I faced the same issue, although I received an email from Meta saying that access was granted.
Could you advise me on how to solve this?
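Not sure if it is the same cause for everyone, but on Colab this error often just means the notebook session is not actually sending a valid token. A quick check, assuming your access request for the model has already been approved (names below are the standard huggingface_hub helpers, nothing project-specific):

# Hypothetical quick check on Colab: log in interactively, then confirm the
# session is authenticated before building the pipeline.
from huggingface_hub import notebook_login, whoami

notebook_login()           # paste a token that can read gated repos
print(whoami()["name"])    # raises if the stored token is missing or invalid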
import transformers
import torch

model_id = "meta-llama/Meta-Llama-3.1-70B"

pipeline = transformers.pipeline(
    "text-generation",
    model=model_id,
    model_kwargs={"torch_dtype": torch.bfloat16},
    device_map="auto",
)
pipeline("Hey how are you doing today?")
HTTPError                                 Traceback (most recent call last)
File /opt/conda/lib/python3.8/site-packages/huggingface_hub/utils/_errors.py:304, in hf_raise_for_status(response, endpoint_name)
    303 try:
--> 304     response.raise_for_status()
    305 except HTTPError as e:
File /opt/conda/lib/python3.8/site-packages/requests/models.py:1024, in Response.raise_for_status(self)
   1023 if http_error_msg:
-> 1024     raise HTTPError(http_error_msg, response=self)
HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/meta-llama/Meta-Llama-3.1-70B/resolve/main/config.json
The above exception was the direct cause of the following exception:
HfHubHTTPError                            Traceback (most recent call last)
File /opt/conda/lib/python3.8/site-packages/huggingface_hub/file_download.py:1751, in _get_metadata_or_catch_error(repo_id, filename, repo_type, revision, endpoint, proxies, etag_timeout, headers, token, local_files_only, relative_filename, storage_folder)
   1750 try:
-> 1751     metadata = get_hf_file_metadata(
   1752         url=url, proxies=proxies, timeout=etag_timeout, headers=headers, token=token
   1753     )
   1754 except EntryNotFoundError as http_error:
File /opt/conda/lib/python3.8/site-packages/huggingface_hub/utils/_validators.py:114, in validate_hf_hub_args.<locals>._inner_fn(*args, **kwargs)
    112     kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.__name__, has_token=has_token, kwargs=kwargs)
--> 114 return fn(*args, **kwargs)
File /opt/conda/lib/python3.8/site-packages/huggingface_hub/file_download.py:1673, in get_hf_file_metadata(url, token, proxies, timeout, library_name, library_version, user_agent, headers)
   1672 # Retrieve metadata
-> 1673 r = _request_wrapper(
   1674     method="HEAD",
   1675     url=url,
   1676     headers=headers,
   1677     allow_redirects=False,
   1678     follow_relative_redirects=True,
   1679     proxies=proxies,
   1680     timeout=timeout,
   1681 )
   1682 hf_raise_for_status(r)
File /opt/conda/lib/python3.8/site-packages/huggingface_hub/file_download.py:376, in _request_wrapper(method, url, follow_relative_redirects, **params)
    375 if follow_relative_redirects:
--> 376     response = _request_wrapper(
    377         method=method,
    378         url=url,
    379         follow_relative_redirects=False,
    380         **params,
    381     )
    383     # If redirection, we redirect only relative paths.
    384     # This is useful in case of a renamed repository.
File /opt/conda/lib/python3.8/site-packages/huggingface_hub/file_download.py:400, in _request_wrapper(method, url, follow_relative_redirects, **params)
    399 response = get_session().request(method=method, url=url, **params)
--> 400 hf_raise_for_status(response)
    401 return response
File /opt/conda/lib/python3.8/site-packages/huggingface_hub/utils/_errors.py:367, in hf_raise_for_status(response, endpoint_name)
    361     message = (
    362         f"\n\n{response.status_code} Forbidden: {error_message}."
    363         + f"\nCannot access content at: {response.url}."
    364         + "\nIf you are trying to create or update content, "
    365         + "make sure you have a token with the write role."
    366     )
--> 367     raise HfHubHTTPError(message, response=response) from e
    369 # Convert HTTPError into a HfHubHTTPError to display request information
    370 # as well (request id and/or server error message)
HfHubHTTPError: (Request ID: Root=1-66bb3fe9-1920d7ac7779923d6c7a7d63;e91af24b-e089-421c-b491-227b23be8494)
403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
Cannot access content at: https://huggingface.co/meta-llama/Meta-Llama-3.1-70B/resolve/main/config.json.
If you are trying to create or update content, make sure you have a token with the write role.
The above exception was the direct cause of the following exception:
LocalEntryNotFoundError                   Traceback (most recent call last)
File /opt/conda/lib/python3.8/site-packages/transformers/utils/hub.py:402, in cached_file(path_or_repo_id, filename, cache_dir, force_download, resume_download, proxies, token, revision, local_files_only, subfolder, repo_type, user_agent, _raise_exceptions_for_gated_repo, _raise_exceptions_for_missing_entries, _raise_exceptions_for_connection_errors, _commit_hash, **deprecated_kwargs)
    400 try:
    401     # Load from URL or cache if already cached
--> 402     resolved_file = hf_hub_download(
    403         path_or_repo_id,
    404         filename,
    405         subfolder=None if len(subfolder) == 0 else subfolder,
    406         repo_type=repo_type,
    407         revision=revision,
    408         cache_dir=cache_dir,
    409         user_agent=user_agent,
    410         force_download=force_download,
    411         proxies=proxies,
    412         resume_download=resume_download,
    413         token=token,
    414         local_files_only=local_files_only,
    415     )
    416 except GatedRepoError as e:
File /opt/conda/lib/python3.8/site-packages/huggingface_hub/utils/_deprecation.py:101, in _deprecate_arguments.<locals>._inner_deprecate_positional_args.<locals>.inner_f(*args, **kwargs)
    100     warnings.warn(message, FutureWarning)
--> 101 return f(*args, **kwargs)
File /opt/conda/lib/python3.8/site-packages/huggingface_hub/utils/_validators.py:114, in validate_hf_hub_args.<locals>._inner_fn(*args, **kwargs)
    112     kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.__name__, has_token=has_token, kwargs=kwargs)
--> 114 return fn(*args, **kwargs)
File /opt/conda/lib/python3.8/site-packages/huggingface_hub/file_download.py:1240, in hf_hub_download(repo_id, filename, subfolder, repo_type, revision, library_name, library_version, cache_dir, local_dir, user_agent, force_download, proxies, etag_timeout, token, local_files_only, headers, endpoint, legacy_cache_layout, resume_download, force_filename, local_dir_use_symlinks)
   1239 else:
-> 1240     return _hf_hub_download_to_cache_dir(
   1241         # Destination
   1242         cache_dir=cache_dir,
   1243         # File info
   1244         repo_id=repo_id,
   1245         filename=filename,
   1246         repo_type=repo_type,
   1247         revision=revision,
   1248         # HTTP info
   1249         endpoint=endpoint,
   1250         etag_timeout=etag_timeout,
   1251         headers=headers,
   1252         proxies=proxies,
   1253         token=token,
   1254         # Additional options
   1255         local_files_only=local_files_only,
   1256         force_download=force_download,
   1257     )
File /opt/conda/lib/python3.8/site-packages/huggingface_hub/file_download.py:1347, in _hf_hub_download_to_cache_dir(cache_dir, repo_id, filename, repo_type, revision, endpoint, etag_timeout, headers, proxies, token, local_files_only, force_download)
   1346     # Otherwise, raise appropriate error
-> 1347     _raise_on_head_call_error(head_call_error, force_download, local_files_only)
   1349 # From now on, etag, commit_hash, url and size are not None.
File /opt/conda/lib/python3.8/site-packages/huggingface_hub/file_download.py:1857, in _raise_on_head_call_error(head_call_error, force_download, local_files_only)
   1855 else:
   1856     # Otherwise: most likely a connection issue or Hub downtime => let's warn the user
-> 1857     raise LocalEntryNotFoundError(
   1858         "An error happened while trying to locate the file on the Hub and we cannot find the requested files"
   1859         " in the local cache. Please check your connection and try again or make sure your Internet connection"
   1860         " is on."
   1861     ) from head_call_error
LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on.
The above exception was the direct cause of the following exception:
OSError                                   Traceback (most recent call last)
Cell In[17], line 6
      2 import torch
      4 model_id = "meta-llama/Meta-Llama-3.1-70B"
----> 6 pipeline = transformers.pipeline(
      7     "text-generation", model=model_id, model_kwargs={"torch_dtype": torch.bfloat16}, device_map="auto"
      8 )
      9 pipeline("Hey how are you doing today?")
File /opt/conda/lib/python3.8/site-packages/transformers/pipelines/__init__.py:805, in pipeline(task, model, config, tokenizer, feature_extractor, image_processor, framework, revision, use_fast, token, device, device_map, torch_dtype, trust_remote_code, model_kwargs, pipeline_class, **kwargs)
    802                 adapter_config = json.load(f)
    803                 model = adapter_config["base_model_name_or_path"]
--> 805     config = AutoConfig.from_pretrained(
    806         model, _from_pipeline=task, code_revision=code_revision, **hub_kwargs, **model_kwargs
    807     )
    808     hub_kwargs["_commit_hash"] = config._commit_hash
    810 custom_tasks = {}
File /opt/conda/lib/python3.8/site-packages/transformers/models/auto/configuration_auto.py:976, in AutoConfig.from_pretrained(cls, pretrained_model_name_or_path, **kwargs)
    973 trust_remote_code = kwargs.pop("trust_remote_code", None)
    974 code_revision = kwargs.pop("code_revision", None)
--> 976 config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs)
    977 has_remote_code = "auto_map" in config_dict and "AutoConfig" in config_dict["auto_map"]
    978 has_local_code = "model_type" in config_dict and config_dict["model_type"] in CONFIG_MAPPING
File /opt/conda/lib/python3.8/site-packages/transformers/configuration_utils.py:632, in PretrainedConfig.get_config_dict(cls, pretrained_model_name_or_path, **kwargs)
    630 original_kwargs = copy.deepcopy(kwargs)
    631 # Get config dict associated with the base config file
--> 632 config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs)
    633 if "_commit_hash" in config_dict:
    634     original_kwargs["_commit_hash"] = config_dict["_commit_hash"]
File /opt/conda/lib/python3.8/site-packages/transformers/configuration_utils.py:689, in PretrainedConfig._get_config_dict(cls, pretrained_model_name_or_path, **kwargs)
    685 configuration_file = kwargs.pop("_configuration_file", CONFIG_NAME) if gguf_file is None else gguf_file
    687 try:
    688     # Load from local folder or from cache or download from model Hub and cache
--> 689     resolved_config_file = cached_file(
    690         pretrained_model_name_or_path,
    691         configuration_file,
    692         cache_dir=cache_dir,
    693         force_download=force_download,
    694         proxies=proxies,
    695         resume_download=resume_download,
    696         local_files_only=local_files_only,
    697         token=token,
    698         user_agent=user_agent,
    699         revision=revision,
    700         subfolder=subfolder,
    701         _commit_hash=commit_hash,
    702     )
    703     commit_hash = extract_commit_hash(resolved_config_file, commit_hash)
    704 except EnvironmentError:
    705     # Raise any environment error raise by cached_file. It will have a helpful error message adapted to
    706     # the original exception.
File /opt/conda/lib/python3.8/site-packages/transformers/utils/hub.py:445, in cached_file(path_or_repo_id, filename, cache_dir, force_download, resume_download, proxies, token, revision, local_files_only, subfolder, repo_type, user_agent, _raise_exceptions_for_gated_repo, _raise_exceptions_for_missing_entries, _raise_exceptions_for_connection_errors, _commit_hash, **deprecated_kwargs)
    439     if (
    440         resolved_file is not None
    441         or not _raise_exceptions_for_missing_entries
    442         or not _raise_exceptions_for_connection_errors
    443     ):
    444         return resolved_file
--> 445     raise EnvironmentError(
    446         f"We couldn't connect to '{HUGGINGFACE_CO_RESOLVE_ENDPOINT}' to load this file, couldn't find it in the"
    447         f" cached files and it looks like {path_or_repo_id} is not the path to a directory containing a file named"
    448         f" {full_filename}.\nCheckout your internet connection or see how to run the library in offline mode at"
    449         " 'https://huggingface.co/docs/transformers/installation#offline-mode'."
    450     ) from e
    451 except EntryNotFoundError as e:
    452     if not _raise_exceptions_for_missing_entries:
OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like meta-llama/Meta-Llama-3.1-70B is not the path to a directory containing a file named config.json.
Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'.
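For anyone hitting this: the key line in the traceback is the 403, "Please enable access to public gated repositories in your fine-grained token settings", which points at the token's permissions rather than at the connection or the model grant. If you use a fine-grained token, edit it on the Hub and enable read access to public gated repositories, or switch to a classic "Read" token, then authenticate before building the pipeline. A minimal sketch under that assumption (the token value is a placeholder, not a real token):

# Sketch, not a guaranteed fix: assumes your access request for the gated repo
# was already approved on the model page, and that the 403 comes from a
# fine-grained token lacking gated-repo read permission (or from no token at all).
import torch
import transformers
from huggingface_hub import login

login(token="hf_xxx")  # classic "Read" token, or fine-grained token with gated-repo read enabled

model_id = "meta-llama/Meta-Llama-3.1-70B"
pipeline = transformers.pipeline(
    "text-generation",
    model=model_id,
    model_kwargs={"torch_dtype": torch.bfloat16},
    device_map="auto",
    token=True,  # reuse the token stored by login()
)
pipeline("Hey how are you doing today?")

If the 403 persists with a token that has the right permission, it is worth double-checking that the grant covers the exact repo being loaded (here Meta-Llama-3.1-70B, not only Meta-Llama-3-8B-Instruct).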