Upload folder using huggingface_hub #2
opened by dn6 (HF staff)
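The commit title points to the huggingface_hub library. A minimal sketch of how such a commit could be produced with its upload_folder API; the repo_id and folder_path below are hypothetical placeholders, not values taken from this PR:

from huggingface_hub import HfApi

api = HfApi()

# Push a local diffusers-style folder (safety_checker/, text_encoder/, vae/, ...)
# to the Hub in a single commit; create_pr=True opens it as a pull request.
api.upload_folder(
    repo_id="user/model-repo",           # assumption: the target repo is not named in this view
    folder_path="./local-model-folder",  # assumption: local path not shown in this view
    commit_message="Upload folder using huggingface_hub",
    create_pr=True,
)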
safety_checker/config.json CHANGED
@@ -14,7 +14,7 @@
     "model_type": "clip_text_model",
     "num_attention_heads": 12
   },
-  "torch_dtype": "
+  "torch_dtype": "float32",
   "transformers_version": "4.43.2",
   "vision_config": {
     "dropout": 0.0,
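The diff above fills in the "torch_dtype" entry so the config now records float32 weights (the previous value is cut off in this view). The stored dtype describes how the weights were saved and can be overridden at load time. A sketch assuming the standard diffusers loading API, with a placeholder repo id:

import torch
from diffusers import DiffusionPipeline

# "torch_dtype" in the config records the dtype the weights were saved in (float32 here);
# passing torch_dtype to from_pretrained casts the weights on load instead.
pipe = DiffusionPipeline.from_pretrained("user/model-repo", torch_dtype=torch.float16)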
safety_checker/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fb351a5ded815c3ff744968ad9c6b218d071b9d313d04f35e813b84b4c0ffde8
+size 1215979664
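This and the other two ADDED .safetensors entries are Git LFS pointer files: the repository itself stores only the spec version, the sha256 oid, and the byte size, while the actual weights live in LFS storage. A sketch of verifying a downloaded copy against the pointer above; the local path is a hypothetical placeholder:

import hashlib
from pathlib import Path

# Recompute the sha256 of the downloaded file and compare it with the
# oid and size recorded in the LFS pointer for safety_checker/model.safetensors.
path = Path("safety_checker/model.safetensors")  # assumption: local download location
sha = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(8 * 1024 * 1024), b""):
        sha.update(chunk)

assert path.stat().st_size == 1215979664
assert sha.hexdigest() == "fb351a5ded815c3ff744968ad9c6b218d071b9d313d04f35e813b84b4c0ffde8"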
text_encoder/config.json CHANGED
@@ -19,7 +19,7 @@
   "num_hidden_layers": 12,
   "pad_token_id": 1,
   "projection_dim": 768,
-  "torch_dtype": "
+  "torch_dtype": "float32",
   "transformers_version": "4.43.2",
   "vocab_size": 49408
 }
text_encoder/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:778d02eb9e707c3fbaae0b67b79ea0d1399b52e624fb634f2f19375ae7c047c3
+size 492265168
vae/diffusion_pytorch_model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b4d2b5932bb4151e54e694fd31ccf51fca908223c9485bd56cd0e1d83ad94c49
+size 334643268
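Before the PR is merged, the uploaded weights can be tried out directly, since the Hub exposes pull requests as revisions of the form refs/pr/<number> (this is PR #2). A sketch with a placeholder repo id:

from diffusers import DiffusionPipeline

# Load the repository at the pull-request revision so the newly uploaded
# safety_checker, text_encoder, and vae weights are used.
# repo_id is a hypothetical placeholder, not taken from the diff.
pipe = DiffusionPipeline.from_pretrained("user/model-repo", revision="refs/pr/2")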