---
dataset_info:
- config_name: A-OKVQA
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 14048199
    num_examples: 1000
  download_size: 1168340
  dataset_size: 14048199
- config_name: CIFAR-100
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 1519890
    num_examples: 1000
  download_size: 20544
  dataset_size: 1519890
- config_name: CIRR
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 70162098
    num_examples: 1000
  download_size: 1565489
  dataset_size: 70162098
- config_name: ChartQA
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 14354641
    num_examples: 1000
  download_size: 1434448
  dataset_size: 14354641
- config_name: Country211
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 3678000
    num_examples: 1000
  download_size: 31556
  dataset_size: 3678000
- config_name: DocVQA
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 23044459
    num_examples: 1000
  download_size: 1734476
  dataset_size: 23044459
- config_name: EDIS
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 184208708
    num_examples: 1000
  download_size: 3350382
  dataset_size: 184208708
- config_name: FashionIQ
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 71169665
    num_examples: 1000
  download_size: 1729457
  dataset_size: 71169665
- config_name: GQA
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 40809641
    num_examples: 1000
  download_size: 1764457
  dataset_size: 40809641
- config_name: HatefulMemes
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 184890
    num_examples: 1000
  download_size: 9972
  dataset_size: 184890
- config_name: ImageNet-1K
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 28773890
    num_examples: 1000
  download_size: 185019
  dataset_size: 28773890
- config_name: ImageNet-A
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 28772890
    num_examples: 1000
  download_size: 147780
  dataset_size: 28772890
- config_name: ImageNet-R
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 3456890
    num_examples: 1000
  download_size: 23656
  dataset_size: 3456890
- config_name: InfographicsVQA
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 19114439
    num_examples: 1000
  download_size: 1439837
  dataset_size: 19114439
- config_name: MSCOCO
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 97759085
    num_examples: 1000
  download_size: 1681753
  dataset_size: 97759085
- config_name: MSCOCO_i2t
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 60201740
    num_examples: 1000
  download_size: 1785583
  dataset_size: 60201740
- config_name: MSCOCO_t2i
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 87127008
    num_examples: 1000
  download_size: 1296167
  dataset_size: 87127008
- config_name: N24News
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 630658
    num_examples: 1000
  download_size: 110698
  dataset_size: 630658
- config_name: NIGHTS
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 75116000
    num_examples: 1000
  download_size: 1528646
  dataset_size: 75116000
- config_name: OK-VQA
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 15332578
    num_examples: 1000
  download_size: 1564823
  dataset_size: 15332578
- config_name: OVEN
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 717934263
    num_examples: 1000
  download_size: 406792141
  dataset_size: 717934263
- config_name: ObjectNet
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 2036000
    num_examples: 1000
  download_size: 27132
  dataset_size: 2036000
- config_name: Place365
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 7045000
    num_examples: 1000
  download_size: 89866
  dataset_size: 7045000
- config_name: RefCOCO
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 96493941
    num_examples: 1000
  download_size: 1858145
  dataset_size: 96493941
- config_name: RefCOCO-Matching
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 145712476
    num_examples: 1000
  download_size: 2879385
  dataset_size: 145712476
- config_name: SUN397
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 7990000
    num_examples: 1000
  download_size: 118447
  dataset_size: 7990000
- config_name: ScienceQA
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 23870406
    num_examples: 1000
  download_size: 958782
  dataset_size: 23870406
- config_name: TextVQA
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 17435986
    num_examples: 1000
  download_size: 1571656
  dataset_size: 17435986
- config_name: VOC2007
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 368000
    num_examples: 1000
  download_size: 13813
  dataset_size: 368000
- config_name: VisDial
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 67989850
    num_examples: 1000
  download_size: 1730820
  dataset_size: 67989850
- config_name: Visual7W
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 22047066
    num_examples: 1000
  download_size: 1564788
  dataset_size: 22047066
- config_name: Visual7W-Pointing
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 94906832
    num_examples: 1000
  download_size: 1299380
  dataset_size: 94906832
- config_name: VisualNews_i2t
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 118329649
    num_examples: 1000
  download_size: 81491360
  dataset_size: 118329649
- config_name: VisualNews_t2i
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 97176206
    num_examples: 1000
  download_size: 1763677
  dataset_size: 97176206
- config_name: VizWiz
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 20550246
    num_examples: 1000
  download_size: 1425789
  dataset_size: 20550246
- config_name: WebQA
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 197701404
    num_examples: 1000
  download_size: 3257136
  dataset_size: 197701404
- config_name: Wiki-SS-NQ
  features:
  - name: qry_text
    dtype: string
  - name: qry_img_path
    dtype: string
  - name: tgt_text
    sequence: string
  - name: tgt_img_path
    sequence: string
  splits:
  - name: test
    num_bytes: 74583207
    num_examples: 1000
  download_size: 1900579
  dataset_size: 74583207
configs:
- config_name: A-OKVQA
  data_files:
  - split: test
    path: A-OKVQA/test-*
- config_name: CIFAR-100
  data_files:
  - split: test
    path: CIFAR-100/test-*
- config_name: CIRR
  data_files:
  - split: test
    path: CIRR/test-*
- config_name: ChartQA
  data_files:
  - split: test
    path: ChartQA/test-*
- config_name: Country211
  data_files:
  - split: test
    path: Country211/test-*
- config_name: DocVQA
  data_files:
  - split: test
    path: DocVQA/test-*
- config_name: EDIS
  data_files:
  - split: test
    path: EDIS/test-*
- config_name: FashionIQ
  data_files:
  - split: test
    path: FashionIQ/test-*
- config_name: GQA
  data_files:
  - split: test
    path: GQA/test-*
- config_name: HatefulMemes
  data_files:
  - split: test
    path: HatefulMemes/test-*
- config_name: ImageNet-1K
  data_files:
  - split: test
    path: ImageNet-1K/test-*
- config_name: ImageNet-A
  data_files:
  - split: test
    path: ImageNet-A/test-*
- config_name: ImageNet-R
  data_files:
  - split: test
    path: ImageNet-R/test-*
- config_name: InfographicsVQA
  data_files:
  - split: test
    path: InfographicsVQA/test-*
- config_name: MSCOCO
  data_files:
  - split: test
    path: MSCOCO/test-*
- config_name: MSCOCO_i2t
  data_files:
  - split: test
    path: MSCOCO_i2t/test-*
- config_name: MSCOCO_t2i
  data_files:
  - split: test
    path: MSCOCO_t2i/test-*
- config_name: N24News
  data_files:
  - split: test
    path: N24News/test-*
- config_name: NIGHTS
  data_files:
  - split: test
    path: NIGHTS/test-*
- config_name: OK-VQA
  data_files:
  - split: test
    path: OK-VQA/test-*
- config_name: OVEN
  data_files:
  - split: test
    path: OVEN/test-*
- config_name: ObjectNet
  data_files:
  - split: test
    path: ObjectNet/test-*
- config_name: Place365
  data_files:
  - split: test
    path: Place365/test-*
- config_name: RefCOCO
  data_files:
  - split: test
    path: RefCOCO/test-*
- config_name: RefCOCO-Matching
  data_files:
  - split: test
    path: RefCOCO-Matching/test-*
- config_name: SUN397
  data_files:
  - split: test
    path: SUN397/test-*
- config_name: ScienceQA
  data_files:
  - split: test
    path: ScienceQA/test-*
- config_name: TextVQA
  data_files:
  - split: test
    path: TextVQA/test-*
- config_name: VOC2007
  data_files:
  - split: test
    path: VOC2007/test-*
- config_name: VisDial
  data_files:
  - split: test
    path: VisDial/test-*
- config_name: Visual7W
  data_files:
  - split: test
    path: Visual7W/test-*
- config_name: Visual7W-Pointing
  data_files:
  - split: test
    path: Visual7W-Pointing/test-*
- config_name: VisualNews_i2t
  data_files:
  - split: test
    path: VisualNews_i2t/test-*
- config_name: VisualNews_t2i
  data_files:
  - split: test
    path: VisualNews_t2i/test-*
- config_name: VizWiz
  data_files:
  - split: test
    path: VizWiz/test-*
- config_name: WebQA
  data_files:
  - split: test
    path: WebQA/test-*
- config_name: Wiki-SS-NQ
  data_files:
  - split: test
    path: Wiki-SS-NQ/test-*
license: apache-2.0
language:
- en
tags:
- ranking
pretty_name: MMEB
size_categories:
- 10K<n<100K
---
# Massive Multimodal Embedding Benchmark

We compile a large set of evaluation tasks to assess the capabilities of multimodal embedding models. The benchmark covers 4 meta-tasks and 36 datasets, meticulously selected for evaluation.

The dataset is published in our paper [VLM2Vec: Training Vision-Language Models for Massive Multimodal Embedding Tasks](https://arxiv.org/abs/2410.05160).
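For a quick orientation, the sketch below enumerates the available evaluation configs (one per dataset) with the `datasets` library. It is a minimal sketch, not part of the benchmark itself: the repository id `TIGER-Lab/MMEB-eval` is an assumption here, so substitute the Hub id of this dataset repository if it differs.

```python
from datasets import get_dataset_config_names

# Assumed Hub id of this dataset repository; adjust if it lives elsewhere.
REPO_ID = "TIGER-Lab/MMEB-eval"

# Each config corresponds to one evaluation dataset (A-OKVQA, CIFAR-100, CIRR, ...).
config_names = get_dataset_config_names(REPO_ID)
print(f"{len(config_names)} configs, e.g. {config_names[:5]}")
```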
## Dataset Usage

For each dataset, we provide 1,000 examples for evaluation. Each example contains a query and a list of candidate targets; both the query and the targets can be any combination of image and text. The first entry in the candidate list is the ground-truth target.
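A minimal sketch of how a single config can be loaded and scored is shown below. The Hub id is the same assumption as above, and the random vectors only stand in for the query/candidate embeddings a real model would produce; since the ground-truth target is the first candidate, an example counts as a hit when that candidate ranks highest.

```python
import numpy as np
from datasets import load_dataset

REPO_ID = "TIGER-Lab/MMEB-eval"  # assumed Hub id; adjust as needed
ds = load_dataset(REPO_ID, "A-OKVQA", split="test")

example = ds[0]
print(example["qry_text"], example["qry_img_path"])                # query side (text + image path)
print(len(example["tgt_text"]), len(example["tgt_img_path"]))      # candidate targets

def hit_at_1(qry_emb: np.ndarray, tgt_embs: np.ndarray) -> bool:
    """The first candidate is the ground truth, so a hit means it scores highest."""
    scores = tgt_embs @ qry_emb  # cosine similarity if embeddings are L2-normalized
    return int(np.argmax(scores)) == 0

# Toy usage with random embeddings standing in for a real embedding model:
rng = np.random.default_rng(0)
qry = rng.normal(size=128)
qry /= np.linalg.norm(qry)
tgts = rng.normal(size=(len(example["tgt_text"]), 128))
tgts /= np.linalg.norm(tgts, axis=1, keepdims=True)
print("hit@1:", hit_at_1(qry, tgts))
```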
## Statistics

We show the statistics of all the datasets as follows:

<img width="900" alt="abs" src="statistics.png">
## Per-dataset Results

We list the performance of different embedding models below:

<img width="900" alt="abs" src="leaderboard.png">
## Submission

We will set up a formal leaderboard soon. If you would like to add your results to the leaderboard, please email us at [email protected].
## Cite Us

```
@article{jiang2024vlm2vec,
  title={VLM2Vec: Training Vision-Language Models for Massive Multimodal Embedding Tasks},
  author={Jiang, Ziyan and Meng, Rui and Yang, Xinyi and Yavuz, Semih and Zhou, Yingbo and Chen, Wenhu},
  journal={arXiv preprint arXiv:2410.05160},
  year={2024}
}
```