RaushanTurganbay (HF staff) committed
Commit ca4bea3
1 parent: a272c74

Upload processor

chat_template.json CHANGED
@@ -1,3 +1,3 @@
 {
   "chat_template": "{% for message in messages %}{% if message['role'] != 'system' %}{{ message['role'].upper() + ': '}}{% endif %}{# Render all images first #}{% for content in message['content'] | selectattr('type', 'equalto', 'image') %}{{ '<image>\n' }}{% endfor %}{# Render all text next #}{% if message['role'] != 'assistant' %}{% for content in message['content'] | selectattr('type', 'equalto', 'text') %}{{ content['text'] + ' '}}{% endfor %}{% else %}{% for content in message['content'] | selectattr('type', 'equalto', 'text') %}{% generation %}{{ content['text'] + ' '}}{% endgeneration %}{% endfor %}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ 'ASSISTANT:' }}{% endif %}"
-}
+}
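
A minimal sketch of how this template renders once the processor is loaded; the repository id below is a placeholder (the commit does not name the target repo), so the model name is an assumption:

from transformers import AutoProcessor

# Placeholder repo id -- substitute the repository this commit was pushed to.
processor = AutoProcessor.from_pretrained("llava-hf/llava-1.5-7b-hf")

messages = [
    {
        "role": "user",
        "content": [
            {"type": "image"},
            {"type": "text", "text": "What is shown in this image?"},
        ],
    },
]

# tokenize=False (the default for processors) returns the rendered prompt string.
prompt = processor.apply_chat_template(messages, add_generation_prompt=True)
print(prompt)
# Per the template above: "USER: <image>\nWhat is shown in this image? ASSISTANT:"

The {% generation %} ... {% endgeneration %} markers around assistant text do not change the rendered prompt; they let apply_chat_template(..., tokenize=True, return_dict=True, return_assistant_tokens_mask=True) mark which tokens belong to assistant turns, e.g. for loss masking during fine-tuning.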
processor_config.json ADDED
@@ -0,0 +1,7 @@
+{
+  "image_token": "<image>",
+  "num_additional_image_tokens": 1,
+  "patch_size": 14,
+  "processor_class": "LlavaProcessor",
+  "vision_feature_select_strategy": "default"
+}
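
A hedged sketch of what these new fields are for: when patch_size and vision_feature_select_strategy are set, LlavaProcessor expands each "<image>" placeholder in the prompt to the number of image features the vision tower will produce. The 336x336 input resolution below is an assumption, not part of this commit:

# Assumed vision-tower input resolution; not specified in this commit.
image_size = 336

# Values from processor_config.json above.
patch_size = 14
num_additional_image_tokens = 1                # e.g. the CLS feature
vision_feature_select_strategy = "default"

num_image_tokens = (image_size // patch_size) ** 2 + num_additional_image_tokens
if vision_feature_select_strategy == "default":
    # The "default" strategy drops the CLS feature again, so it is not counted.
    num_image_tokens -= 1

print(num_image_tokens)  # 576 <image> placeholder tokens per image

With these fields in processor_config.json, the placeholder expansion can happen in the processor rather than inside the model's forward pass, which appears to be why they are uploaded alongside the chat template.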
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -47,6 +47,7 @@
   "bos_token": "<s>",
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
+  "extra_special_tokens": {},
   "legacy": false,
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "<pad>",