{
  "_name_or_path": "facebook/opt-125m",
  "_remove_final_layer_norm": false,
  "activation_dropout": 0.0,
  "activation_function": "relu",
  "architectures": [
    "OPTForCausalLM"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 2,
  "do_layer_norm_before": true,
  "dropout": 0.1,
  "enable_bias": true,
  "eos_token_id": 2,
  "ffn_dim": 3072,
  "hidden_size": 768,
  "init_std": 0.02,
  "layer_norm_elementwise_affine": true,
  "layerdrop": 0.0,
  "max_position_embeddings": 2048,
  "model_type": "opt",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 1,
  "prefix": "</s>",
  "quantization_config": {
    "batch_size": 1,
    "bits": 4,
    "block_name_to_quantize": "model.decoder.layers",
    "cache_block_outputs": true,
    "damp_percent": 0.1,
    "dataset": [
      "The sun sets behind the mountains, casting a warm glow over the valley.",
      "In a galaxy far, far away, a lone spaceship embarks on a daring mission.",
      "Amidst the hustle and bustle of the city, a quiet park offers a serene escape.",
      "The sound of waves crashing against the shore soothes the soul.",
      "A mysterious door appeared at the end of the winding cobblestone path.",
      "Deep in the enchanted forest, magical creatures danced under the moonlight.",
      "On a rainy day, the aroma of freshly brewed coffee filled the cozy cafe.",
      "Lost in thought, she gazed out of the window as raindrops painted patterns on the glass.",
      "The old bookstore had a charm of its own, with dusty shelves holding forgotten tales.",
      "Beneath a blanket of stars, a campfire flickered, sharing stories of the cosmos."
    ],
    "desc_act": false,
    "exllama_config": {
      "version": 1
    },
    "group_size": 128,
    "max_input_length": null,
    "model_seqlen": 2048,
    "module_name_preceding_first_block": [
      "model.decoder.embed_tokens",
      "model.decoder.embed_positions",
      "model.decoder.final_layer_norm"
    ],
    "pad_token_id": null,
    "quant_method": "gptq",
    "sym": true,
    "tokenizer": null,
    "true_sequential": true,
    "use_cuda_fp16": true,
    "use_exllama": true
  },
  "torch_dtype": "float16",
  "transformers_version": "4.36.0.dev0",
  "use_cache": true,
  "vocab_size": 50272,
  "word_embed_proj_dim": 768
}
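
For reference, a config like this is what transformers serializes after GPTQ-quantizing a checkpoint: everything under "quantization_config" corresponds one-to-one to fields of transformers' GPTQConfig. Below is a minimal Python sketch of how the settings above could be reproduced and the quantized model reloaded. It assumes the auto-gptq and optimum backends are installed and a CUDA GPU is available; the save path "./opt-125m-gptq" is a placeholder chosen for illustration, not something taken from the config.

    # Sketch: mirror the "quantization_config" above with transformers' GPTQConfig,
    # then load, save, and reload the quantized facebook/opt-125m checkpoint.
    # Assumptions: auto-gptq + optimum installed, CUDA available,
    # "./opt-125m-gptq" is a hypothetical output directory.
    from transformers import AutoModelForCausalLM, AutoTokenizer, GPTQConfig

    calibration_texts = [
        "The sun sets behind the mountains, casting a warm glow over the valley.",
        # ... the remaining calibration sentences from the "dataset" field above
    ]

    tokenizer = AutoTokenizer.from_pretrained("facebook/opt-125m")

    # Fields mirror the serialized "quantization_config" entries.
    quant_config = GPTQConfig(
        bits=4,
        group_size=128,
        dataset=calibration_texts,
        tokenizer=tokenizer,
        damp_percent=0.1,
        desc_act=False,
        sym=True,
        true_sequential=True,
        model_seqlen=2048,
    )

    # Quantization runs inside from_pretrained when quantization_config is passed.
    model = AutoModelForCausalLM.from_pretrained(
        "facebook/opt-125m",
        quantization_config=quant_config,
        device_map="auto",
    )
    model.save_pretrained("./opt-125m-gptq")  # writes a config.json like the one above
    tokenizer.save_pretrained("./opt-125m-gptq")

    # Later, the already-quantized model can be reloaded directly:
    quantized = AutoModelForCausalLM.from_pretrained("./opt-125m-gptq", device_map="auto")

Note that the "use_exllama" / "exllama_config" entries select the ExLlama kernels used at inference time when the quantized weights are loaded; they do not change the quantization itself. Fields such as "block_name_to_quantize" and "module_name_preceding_first_block" are filled in automatically for known architectures like OPT.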