Latest commit: Update README.md (70fe66b, verified)

Files in this repository:
- 1.52 kB · initial commit
- 5.33 kB · Upload folder using huggingface_hub (#1)
- 0 Bytes · Update README.md

model.pt · Detected Pickle imports (42), as reported by the Hub's pickle scanner (a sketch for reproducing the scan locally follows the list):
- "PIL.Image.Resampling",
- "torch.FloatStorage",
- "transformers_modules.BAAI.Bunny-Llama-3-8B-V.04f5597da26fc1502d844e4eb4e23a297f99e213.modeling_bunny_llama.BunnyLlamaForCausalLM",
- "transformers.image_utils.ChannelDimension",
- "collections.OrderedDict",
- "quanto.tensor.qtype.qtype",
- "transformers_modules.BAAI.Bunny-Llama-3-8B-V.04f5597da26fc1502d844e4eb4e23a297f99e213.modeling_bunny_llama.SigLipVisionEmbeddings",
- "torch.nn.modules.container.Sequential",
- "torch._utils._rebuild_parameter",
- "torch.device",
- "transformers_modules.BAAI.Bunny-Llama-3-8B-V.04f5597da26fc1502d844e4eb4e23a297f99e213.modeling_bunny_llama.LlamaDecoderLayer",
- "quanto.nn.qconv2d.QConv2d",
- "transformers_modules.BAAI.Bunny-Llama-3-8B-V.04f5597da26fc1502d844e4eb4e23a297f99e213.configuration_bunny_llama.SigLipVisionConfig",
- "transformers.activations.PytorchGELUTanh",
- "quanto.nn.qlinear.QLinear",
- "torch._utils._rebuild_tensor_v2",
- "transformers_modules.BAAI.Bunny-Llama-3-8B-V.04f5597da26fc1502d844e4eb4e23a297f99e213.modeling_bunny_llama.SigLipVisionTransformer",
- "transformers_modules.BAAI.Bunny-Llama-3-8B-V.04f5597da26fc1502d844e4eb4e23a297f99e213.configuration_bunny_llama.BunnyLlamaConfig",
- "transformers_modules.BAAI.Bunny-Llama-3-8B-V.04f5597da26fc1502d844e4eb4e23a297f99e213.modeling_bunny_llama.SigLipEncoder",
- "transformers_modules.BAAI.Bunny-Llama-3-8B-V.04f5597da26fc1502d844e4eb4e23a297f99e213.modeling_bunny_llama.SigLipAttention",
- "transformers_modules.BAAI.Bunny-Llama-3-8B-V.04f5597da26fc1502d844e4eb4e23a297f99e213.modeling_bunny_llama.SigLipVisionModel",
- "transformers_modules.BAAI.Bunny-Llama-3-8B-V.04f5597da26fc1502d844e4eb4e23a297f99e213.modeling_bunny_llama.LlamaRotaryEmbedding",
- "torch.nn.modules.activation.SiLU",
- "torch.float8_e4m3fn",
- "torch.nn.modules.sparse.Embedding",
- "transformers_modules.BAAI.Bunny-Llama-3-8B-V.04f5597da26fc1502d844e4eb4e23a297f99e213.modeling_bunny_llama.SigLipEncoderLayer",
- "torch.HalfStorage",
- "torch.nn.modules.container.ModuleList",
- "torch.float16",
- "torch.nn.modules.normalization.LayerNorm",
- "transformers.generation.configuration_utils.GenerationConfig",
- "transformers_modules.BAAI.Bunny-Llama-3-8B-V.04f5597da26fc1502d844e4eb4e23a297f99e213.modeling_bunny_llama.LlamaMLP",
- "transformers_modules.BAAI.Bunny-Llama-3-8B-V.04f5597da26fc1502d844e4eb4e23a297f99e213.modeling_bunny_llama.LlamaRMSNorm",
- "transformers_modules.BAAI.Bunny-Llama-3-8B-V.04f5597da26fc1502d844e4eb4e23a297f99e213.modeling_bunny_llama.SigLipMLP",
- "transformers_modules.BAAI.Bunny-Llama-3-8B-V.04f5597da26fc1502d844e4eb4e23a297f99e213.modeling_bunny_llama.BunnyLlamaModel",
- "torch.nn.modules.linear.Identity",
- "transformers_modules.BAAI.Bunny-Llama-3-8B-V.04f5597da26fc1502d844e4eb4e23a297f99e213.modeling_bunny_llama.SigLipImageProcessor",
- "__builtin__.set",
- "torch.nn.modules.activation.GELU",
- "transformers_modules.BAAI.Bunny-Llama-3-8B-V.04f5597da26fc1502d844e4eb4e23a297f99e213.modeling_bunny_llama.SigLipVisionTower",
- "torch.LongStorage",
- "transformers_modules.BAAI.Bunny-Llama-3-8B-V.04f5597da26fc1502d844e4eb4e23a297f99e213.modeling_bunny_llama.LlamaSdpaAttention"

Remaining files in the listing:
- model.pt · 17 GB · Upload folder using huggingface_hub (#1)
- 1.02 kB · Upload folder using huggingface_hub (#1)
- 301 Bytes · Upload folder using huggingface_hub (#1)
- 9.09 MB · Upload folder using huggingface_hub (#1)
- 51 kB · Upload folder using huggingface_hub (#1)
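
In practical terms, the import scan shows that model.pt is a complete pickled, quanto-quantized BunnyLlamaForCausalLM module rather than a plain state_dict: unpickling it needs the standalone `quanto` package (the import paths point at the pre-optimum-quanto package name) and the remote modeling code of `BAAI/Bunny-Llama-3-8B-V` importable under `transformers_modules` at the revision hash baked into those paths. This repo does not document a loading recipe; the following is only a hypothetical sketch of one way to satisfy those imports before calling `torch.load`, not confirmed usage.

```python
# Hypothetical loading sketch, not this repo's documented usage.
# Assumptions: `quanto` is installed and the base repo's remote code is reachable.
import torch
from transformers import AutoModelForCausalLM

REVISION = "04f5597da26fc1502d844e4eb4e23a297f99e213"  # hash seen in the pickle imports

# Loading the base model with trust_remote_code caches modeling_bunny_llama.py under
# transformers_modules/BAAI/Bunny-Llama-3-8B-V/<revision>/, which unpickling will import.
# (This also downloads the base weights, so it is a blunt way to register the module.)
_ = AutoModelForCausalLM.from_pretrained(
    "BAAI/Bunny-Llama-3-8B-V",
    revision=REVISION,
    trust_remote_code=True,
    torch_dtype=torch.float16,
)

# The checkpoint is a full pickled nn.Module, so weights_only must stay False;
# only do this if you trust the file.
model = torch.load("model.pt", map_location="cpu", weights_only=False)
model.eval()
```

Because the file is a full pickle, `torch.load(..., weights_only=True)` will refuse to load it: the restricted unpickler rejects the non-allowlisted globals that the scan above enumerates.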