I'm getting the following error when loading the model: `SafetensorError: Error while deserializing header: MetadataIncompleteBuffer`.
What could be the reason for this? It was working fine until a while ago.
```
SafetensorError Traceback (most recent call last)
/tmp/ipykernel_2268/3485156104.py in <cell line: 1>()
----> 1 model = AutoModelForCausalLM.from_pretrained("Nexusflow/Starling-LM-7B-beta")
~/conda/pytorch21_p39_gpu_v1/lib/python3.9/site-packages/transformers/models/auto/auto_factory.py in from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs)
564 elif type(config) in cls._model_mapping.keys():
565 model_class = _get_model_class(config, cls._model_mapping)
--> 566 return model_class.from_pretrained(
567 pretrained_model_name_or_path, *model_args, config=config, **hub_kwargs, **kwargs
568 )
~/conda/pytorch21_p39_gpu_v1/lib/python3.9/site-packages/transformers/modeling_utils.py in from_pretrained(cls, pretrained_model_name_or_path, config, cache_dir, ignore_mismatched_sizes, force_download, local_files_only, token, revision, use_safetensors, *model_args, **kwargs)
3848 offload_index,
3849 error_msgs,
-> 3850 ) = cls._load_pretrained_model(
3851 model,
3852 state_dict,
~/conda/pytorch21_p39_gpu_v1/lib/python3.9/site-packages/transformers/modeling_utils.py in _load_pretrained_model(cls, model, state_dict, loaded_keys, resolved_archive_file, pretrained_model_name_or_path, ignore_mismatched_sizes, sharded_metadata, _fast_init, low_cpu_mem_usage, device_map, offload_folder, offload_state_dict, dtype, is_quantized, keep_in_fp32_modules)
4257 if shard_file in disk_only_shard_files:
4258 continue
-> 4259 state_dict = load_state_dict(shard_file)
4260
4261 # Mistmatched keys contains tuples key/shape1/shape2 of weights in the checkpoint that have a shape not
~/conda/pytorch21_p39_gpu_v1/lib/python3.9/site-packages/transformers/modeling_utils.py in load_state_dict(checkpoint_file)
504 if checkpoint_file.endswith(".safetensors") and is_safetensors_available():
505 # Check format of the archive
--> 506 with safe_open(checkpoint_file, framework="pt") as f:
507 metadata = f.metadata()
508 if metadata.get("format") not in ["pt", "tf", "flax"]:
SafetensorError: Error while deserializing header: MetadataIncompleteBuffer
```
@chandrak could you check your transformers version? Also, this error usually means a checkpoint shard on disk is truncated or incomplete (e.g. an interrupted download), so try clearing the HF cache and downloading again.