{
  "meta-llama/Meta-Llama-3-8B-Instruct": {
    "model_name": "meta-llama/Meta-Llama-3-8B-Instruct",
    "stop_tokens": [
      "<|eot_id|>",
      "<|end_of_text|>",
      "<|starter_header_id|>",
      "<|end_header_id|>",
      "assistant"
    ],
    "stop_token_ids": [
      128009,
      128001,
      128006,
      128007,
      78191
    ],
    "extract_input": "<|begin_of_text|><|start_header_id|>user<|end_header_id|>Deutsch:\n\n",
    "extract_input_with_system_prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nA chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\n"
  },
  "meta-llama/Meta-Llama-3-70B-Instruct": {
    "model_name": "meta-llama/Meta-Llama-3-70B-Instruct",
    "stop_tokens": [
      "<|eot_id|>",
      "<|end_of_text|>",
      "<|starter_header_id|>",
      "<|end_header_id|>",
      "assistant"
    ],
    "stop_token_ids": [
      128009,
      128001,
      128006,
      128007,
      78191
    ],
    "extract_input": "<|begin_of_text|><|start_header_id|>user<|end_header_id|>Deutsch:\n\n"
  },
  "meta-llama/Llama-2-7b-chat-hf": {
    "model_name": "meta-llama/Llama-2-7b-chat-hf",
    "stop_tokens": [
      "</s>",
      "<s>",
      "<unk>",
      "assistant"
    ],
    "stop_token_ids": [
      2,
      1,
      0,
      20255
    ],
    "extract_input": "[INST]Deutsch: "
  }
}