I cannot use this model with the transformers library.
First, I tried using a pipeline as a high-level helper:
from transformers import pipeline
pipe = pipeline("automatic-speech-recognition", model="MediaTek-Research/Clairaudience")
I also tried loading the model directly:
from transformers import AutoProcessor, AutoModel
processor = AutoProcessor.from_pretrained("MediaTek-Research/Clairaudience")
model = AutoModel.from_pretrained("MediaTek-Research/Clairaudience")
Both of the above code snippets trigger the following error, because the repository does not contain a preprocessor_config.json file:
HTTPError Traceback (most recent call last)
File /opt/anaconda/lib/python3.11/site-packages/huggingface_hub/utils/_errors.py:304, in hf_raise_for_status(response, endpoint_name)
303 try:
--> 304 response.raise_for_status()
305 except HTTPError as e:
File /opt/anaconda/lib/python3.11/site-packages/requests/models.py:1021, in Response.raise_for_status(self)
1020 if http_error_msg:
-> 1021 raise HTTPError(http_error_msg, response=self)
HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/MediaTek-Research/Clairaudience/resolve/main/preprocessor_config.json
The above exception was the direct cause of the following exception:
EntryNotFoundError Traceback (most recent call last)
File /opt/anaconda/lib/python3.11/site-packages/transformers/utils/hub.py:402, in cached_file(path_or_repo_id, filename, cache_dir, force_download, resume_download, proxies, token, revision, local_files_only, subfolder, repo_type, user_agent, _raise_exceptions_for_gated_repo, _raise_exceptions_for_missing_entries, _raise_exceptions_for_connection_errors, _commit_hash, **deprecated_kwargs)
400 try:
401 # Load from URL or cache if already cached
--> 402 resolved_file = hf_hub_download(
403 path_or_repo_id,
404 filename,
405 subfolder=None if len(subfolder) == 0 else subfolder,
406 repo_type=repo_type,
407 revision=revision,
408 cache_dir=cache_dir,
409 user_agent=user_agent,
410 force_download=force_download,
411 proxies=proxies,
412 resume_download=resume_download,
413 token=token,
414 local_files_only=local_files_only,
415 )
416 except GatedRepoError as e:
File /opt/anaconda/lib/python3.11/site-packages/huggingface_hub/utils/_deprecation.py:101, in _deprecate_arguments.._inner_deprecate_positional_args..inner_f(*args, **kwargs)
100 warnings.warn(message, FutureWarning)
--> 101 return f(*args, **kwargs)
File /opt/anaconda/lib/python3.11/site-packages/huggingface_hub/utils/_validators.py:114, in validate_hf_hub_args.._inner_fn(*args, **kwargs)
112 kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.name, has_token=has_token, kwargs=kwargs)
--> 114 return fn(*args, **kwargs)
File /opt/anaconda/lib/python3.11/site-packages/huggingface_hub/file_download.py:1240, in hf_hub_download(repo_id, filename, subfolder, repo_type, revision, library_name, library_version, cache_dir, local_dir, user_agent, force_download, proxies, etag_timeout, token, local_files_only, headers, endpoint, legacy_cache_layout, resume_download, force_filename, local_dir_use_symlinks)
1239 else:
-> 1240 return _hf_hub_download_to_cache_dir(
1241 # Destination
1242 cache_dir=cache_dir,
1243 # File info
1244 repo_id=repo_id,
1245 filename=filename,
1246 repo_type=repo_type,
1247 revision=revision,
1248 # HTTP info
1249 endpoint=endpoint,
1250 etag_timeout=etag_timeout,
1251 headers=headers,
1252 proxies=proxies,
1253 token=token,
1254 # Additional options
1255 local_files_only=local_files_only,
1256 force_download=force_download,
1257 )
File /opt/anaconda/lib/python3.11/site-packages/huggingface_hub/file_download.py:1303, in _hf_hub_download_to_cache_dir(cache_dir, repo_id, filename, repo_type, revision, endpoint, etag_timeout, headers, proxies, token, local_files_only, force_download)
1301 # Try to get metadata (etag, commit_hash, url, size) from the server.
1302 # If we can't, a HEAD request error is returned.
-> 1303 (url_to_download, etag, commit_hash, expected_size, head_call_error) = _get_metadata_or_catch_error(
1304 repo_id=repo_id,
1305 filename=filename,
1306 repo_type=repo_type,
1307 revision=revision,
1308 endpoint=endpoint,
1309 proxies=proxies,
1310 etag_timeout=etag_timeout,
1311 headers=headers,
1312 token=token,
1313 local_files_only=local_files_only,
1314 storage_folder=storage_folder,
1315 relative_filename=relative_filename,
1316 )
1318 # etag can be None for several reasons:
1319 # 1. we passed local_files_only.
1320 # 2. we don't have a connection
(...)
1326 # If the specified revision is a commit hash, look inside "snapshots".
1327 # If the specified revision is a branch or tag, look inside "refs".
File /opt/anaconda/lib/python3.11/site-packages/huggingface_hub/file_download.py:1751, in _get_metadata_or_catch_error(repo_id, filename, repo_type, revision, endpoint, proxies, etag_timeout, headers, token, local_files_only, relative_filename, storage_folder)
1750 try:
-> 1751 metadata = get_hf_file_metadata(
1752 url=url, proxies=proxies, timeout=etag_timeout, headers=headers, token=token
1753 )
1754 except EntryNotFoundError as http_error:
File /opt/anaconda/lib/python3.11/site-packages/huggingface_hub/utils/_validators.py:114, in validate_hf_hub_args.._inner_fn(*args, **kwargs)
112 kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.name, has_token=has_token, kwargs=kwargs)
--> 114 return fn(*args, **kwargs)
File /opt/anaconda/lib/python3.11/site-packages/huggingface_hub/file_download.py:1673, in get_hf_file_metadata(url, token, proxies, timeout, library_name, library_version, user_agent, headers)
1672 # Retrieve metadata
-> 1673 r = _request_wrapper(
1674 method="HEAD",
1675 url=url,
1676 headers=headers,
1677 allow_redirects=False,
1678 follow_relative_redirects=True,
1679 proxies=proxies,
1680 timeout=timeout,
1681 )
1682 hf_raise_for_status(r)
File /opt/anaconda/lib/python3.11/site-packages/huggingface_hub/file_download.py:376, in _request_wrapper(method, url, follow_relative_redirects, **params)
375 if follow_relative_redirects:
--> 376 response = _request_wrapper(
377 method=method,
378 url=url,
379 follow_relative_redirects=False,
380 **params,
381 )
383 # If redirection, we redirect only relative paths.
384 # This is useful in case of a renamed repository.
File /opt/anaconda/lib/python3.11/site-packages/huggingface_hub/file_download.py:400, in _request_wrapper(method, url, follow_relative_redirects, **params)
399 response = get_session().request(method=method, url=url, **params)
--> 400 hf_raise_for_status(response)
401 return response
File /opt/anaconda/lib/python3.11/site-packages/huggingface_hub/utils/_errors.py:315, in hf_raise_for_status(response, endpoint_name)
314 message = f"{response.status_code} Client Error." + "\n\n" + f"Entry Not Found for url: {response.url}."
--> 315 raise EntryNotFoundError(message, response) from e
317 elif error_code == "GatedRepo":
EntryNotFoundError: 404 Client Error. (Request ID: Root=1-673f6d41-3ecb79e427ba1cac1d8217b6;d62cc26a-dd6d-4f5e-840e-2dfcdf56b28a)
Entry Not Found for url: https://huggingface.co/MediaTek-Research/Clairaudience/resolve/main/preprocessor_config.json.
The above exception was the direct cause of the following exception:
OSError Traceback (most recent call last)
Cell In[5], line 1
----> 1 pipe = pipeline("automatic-speech-recognition", model="MediaTek-Research/Clairaudience")
File /opt/anaconda/lib/python3.11/site-packages/transformers/pipelines/init.py:1040, in pipeline(task, model, config, tokenizer, feature_extractor, image_processor, framework, revision, use_fast, token, device, device_map, torch_dtype, trust_remote_code, model_kwargs, pipeline_class, **kwargs)
1038 # Instantiate feature_extractor if needed
1039 if isinstance(feature_extractor, (str, tuple)):
-> 1040 feature_extractor = AutoFeatureExtractor.from_pretrained(
1041 feature_extractor, _from_pipeline=task, **hub_kwargs, **model_kwargs
1042 )
1044 if (
1045 feature_extractor._processor_class
1046 and feature_extractor._processor_class.endswith("WithLM")
1047 and isinstance(model_name, str)
1048 ):
1049 try:
File /opt/anaconda/lib/python3.11/site-packages/transformers/models/auto/feature_extraction_auto.py:343, in AutoFeatureExtractor.from_pretrained(cls, pretrained_model_name_or_path, **kwargs)
340 trust_remote_code = kwargs.pop("trust_remote_code", None)
341 kwargs["_from_auto"] = True
--> 343 config_dict, _ = FeatureExtractionMixin.get_feature_extractor_dict(pretrained_model_name_or_path, **kwargs)
344 feature_extractor_class = config_dict.get("feature_extractor_type", None)
345 feature_extractor_auto_map = None
File /opt/anaconda/lib/python3.11/site-packages/transformers/feature_extraction_utils.py:499, in FeatureExtractionMixin.get_feature_extractor_dict(cls, pretrained_model_name_or_path, **kwargs)
496 feature_extractor_file = FEATURE_EXTRACTOR_NAME
497 try:
498 # Load from local folder or from cache or download from model Hub and cache
--> 499 resolved_feature_extractor_file = cached_file(
500 pretrained_model_name_or_path,
501 feature_extractor_file,
502 cache_dir=cache_dir,
503 force_download=force_download,
504 proxies=proxies,
505 resume_download=resume_download,
506 local_files_only=local_files_only,
507 subfolder=subfolder,
508 token=token,
509 user_agent=user_agent,
510 revision=revision,
511 )
512 except EnvironmentError:
513 # Raise any environment error raise by `cached_file`. It will have a helpful error message adapted to
514 # the original exception.
515 raise
File /opt/anaconda/lib/python3.11/site-packages/transformers/utils/hub.py:456, in cached_file(path_or_repo_id, filename, cache_dir, force_download, resume_download, proxies, token, revision, local_files_only, subfolder, repo_type, user_agent, _raise_exceptions_for_gated_repo, _raise_exceptions_for_missing_entries, _raise_exceptions_for_connection_errors, _commit_hash, **deprecated_kwargs)
454 if revision is None:
455 revision = "main"
--> 456 raise EnvironmentError(
457 f"{path_or_repo_id} does not appear to have a file named {full_filename}. Checkout "
458 f"'https://huggingface.co/{path_or_repo_id}/tree/{revision}' for available files."
459 ) from e
460 except HTTPError as err:
461 resolved_file = _get_cache_file_to_return(path_or_repo_id, full_filename, cache_dir, revision)
OSError: MediaTek-Research/Clairaudience does not appear to have a file named preprocessor_config.json. Checkout 'https://huggingface.co/MediaTek-Research/Clairaudience/tree/main' for available files.