Update README.md
README.md CHANGED
@@ -3,7 +3,7 @@ license: mit
 pipeline_tag: image-text-to-text
 library_name: transformers
 base_model:
-- OpenGVLab/InternViT-
+- OpenGVLab/InternViT-6B-448px-V2_5
 - Qwen/Qwen2.5-72B-Instruct
 base_model_relation: merge
 language:
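
Since the frontmatter declares `library_name: transformers` and `pipeline_tag: image-text-to-text`, here is a minimal, hedged sketch of what loading the merged checkpoint typically looks like. The repo id `OpenGVLab/InternVL2_5-78B`, the `model.chat(...)` call, and the generation settings follow the pattern of other InternVL model cards and are assumptions, not part of this diff.

```python
# Sketch only: loading an InternVL-style checkpoint with transformers.
# Assumes the merged model is published as OpenGVLab/InternVL2_5-78B and
# ships its own modeling code (hence trust_remote_code=True).
import torch
from transformers import AutoModel, AutoTokenizer

path = "OpenGVLab/InternVL2_5-78B"  # assumed repo id

model = AutoModel.from_pretrained(
    path,
    torch_dtype=torch.bfloat16,   # bf16 weights; a 78B model needs multi-GPU placement
    low_cpu_mem_usage=True,
    device_map="auto",            # requires accelerate; spreads layers across GPUs
    trust_remote_code=True,
).eval()
tokenizer = AutoTokenizer.from_pretrained(path, trust_remote_code=True, use_fast=False)

# The InternVL remote code exposes a chat() helper; passing None for pixel_values
# runs a text-only turn. Image input would require the model card's own
# preprocessing helper (448x448 tiling), omitted here.
generation_config = dict(max_new_tokens=512, do_sample=False)
response = model.chat(tokenizer, None, "Hello, who are you?", generation_config)
print(response)
```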
@@ -53,48 +53,74 @@ InternVL 2.5 is a multimodal large language model series, featuring models of various sizes
 ### Image Benchmarks


-| Benchmark
-|
-| MMMU (val)
-| MMMU (test)
-| MMMU-PRO (overall)
-| MathVista (mini)
-| MathVision (mini)
-| MathVision (full)
-| MathVerse (mini)
-| Olympiad Bench
-| AI2D (w / wo M)
-| ChartQA (test avg.)
-| TextVQA (val)
-| DocVQA (test)
-| InfoVQA (test)
-| OCR-Bench
-| SEED-2 Plus
-| CharXiv (RQ / DQ)
-| VCR-EN-Easy (EM / Jaccard) | 52.0 / 65.4 | 91.6 / 96.4 | 62.0 / 77.7 | 63.9 / 74.7 | 62.7 / 77.7 | - |
-| BLINK (val)
-| Mantis Eval
-| MMIU
-| Muir Bench
-| MMT (val)
-| MIRB (avg.)
-| RealWorld QA
-| MME-RW (EN)
-| WildVision (win rate)|71.8
-| R-Bench
-| MME (sum)
-| MMB (EN / CN)
-| MMBv1.1 (EN)
-| MMVet (turbo)
-| MMVetv2 (0613)
-| MMStar
-| HallBench (avg.)
-| MMHal (score)
-| CRPE (relation)
-| POPE (avg.)
+| Benchmark | GPT-4V | GPT-4o-20240513 | Claude-3-Opus | Claude-3.5-Sonnet | Gemini-1.5-Pro | LLaVA-OneVision-72B | Qwen2-VL-72B | InternVL2.5-78B |
+|----------------------------|-------------|-----------------|---------------|-------------------|----------------|---------------------|--------------|-----------------|
+| MMMU (val) | 63.1 | 69.1 | - | 68.3 | 62.2 | 56.8 | 64.5 | 70.1 |
+| MMMU (test) | - | - | - | - | - | - | - | 61.8 |
+| MMMU-PRO (overall) | - | 51.9 | - | 51.5 | 46.9 | 31.0 | 46.2 | 48.6 |
+| MathVista (mini) | 58.1 | 63.8 | - | 67.7 | 63.9 | 67.5 | 70.5 | 72.3 |
+| MathVision (mini) | - | - | - | - | - | - | - | 34.9 |
+| MathVision (full) | 24.0 | 30.4 | - | - | 19.2 | - | 25.9 | 32.2 |
+| MathVerse (mini) | 32.8 | 50.2 | - | - | - | 39.1 | - | 51.7 |
+| Olympiad Bench | 18.0 | 25.9 | - | - | - | - | - | 11.6 |
+| AI2D (w / wo M) | 78.2 / 89.4 | 84.6 / 94.2 | 70.6 / 88.1 | 81.2 / 94.7 | 79.1 / 94.4 | 85.6 / - | 88.1 / - | 89.1 / 95.7 |
+| ChartQA (test avg.) | 78.5 | 85.7 | 80.8 | 90.8 | 87.2 | 83.7 | 88.3 | 88.3 |
+| TextVQA (val) | 78.0 | 77.4 | 67.5 | 74.1 | 78.8 | 80.5 | 85.5 | 83.4 |
+| DocVQA (test) | 88.4 | 92.8 | 89.3 | 95.2 | 93.1 | 91.3 | 96.5 | 95.1 |
+| InfoVQA (test) | 75.1 | 79.2 | 55.6 | 74.3 | 81.0 | 74.9 | 84.5 | 84.1 |
+| OCR-Bench | 645 | 736 | 694 | 788 | 754 | 741 | 877 | 854 |
+| SEED-2 Plus | 53.8 | 72.0 | 44.2 | 71.7 | - | 69.7 | - | 71.3 |
+| CharXiv (RQ / DQ) | 37.1 / 79.9 | 47.1 / 84.5 | 30.2 / 71.6 | 60.2 / 84.3 | 43.3 / 72.0 | - | 91.3 / 94.6 | 42.4 / 82.3 |
+| VCR-EN-Easy (EM / Jaccard) | 52.0 / 65.4 | 91.6 / 96.4 | 62.0 / 77.7 | 63.9 / 74.7 | 62.7 / 77.7 | - | 94.6 | 95.7 / 94.5 |
+| BLINK (val) | 54.6 | 68.0 | - | - | - | 55.4 | - | 63.8 |
+| Mantis Eval | 62.7 | - | - | - | - | 77.6 | - | 77.0 |
+| MMIU | - | 55.7 | - | 53.4 | 53.4 | - | - | 55.8 |
+| Muir Bench | 62.3 | 68.0 | - | - | - | 54.8 | - | 63.5 |
+| MMT (val) | 64.3 | 65.4 | - | - | 64.5 | - | 71.8 | 70.8 |
+| MIRB (avg.) | 53.1 | - | - | - | - | - | - | 61.1 |
+| RealWorld QA | 61.4 | 75.4 | - | 60.1 | 67.5 | 71.9 | 77.8 | 78.7 |
+| MME-RW (EN) | - | 45.2 | - | 51.6 | 38.2 | - | - | 62.9 |
+| WildVision (win rate) | 71.8 | 80.6 | - | - | - | - | - | 71.4 |
+| R-Bench | 65.6 | 77.7 | - | - | - | - | - | 77.2 |
+| MME (sum) | 1926.6 | - | 1586.8 | - | - | 2261.0 | 2482.7 | 2494.5 |
+| MMB (EN / CN) | 81.0 / 80.2 | 83.4 / 82.1 | 63.3 / 59.2 | 82.6 / 83.5 | 73.9 / 73.8 | 85.8 / 85.3 | 86.5 / 86.6 | 88.3 / 88.5 |
+| MMBv1.1 (EN) | 80.0 | 83.1 | 60.1 | 80.9 | 74.6 | 85.0 | 85.9 | 87.4 |
+| MMVet (turbo) | 67.5 | 69.1 | 51.7 | 70.1 | 64.0 | 60.6 | 74.0 | 72.3 |
+| MMVetv2 (0613) | 66.3 | 71.0 | 55.8 | 71.8 | 66.9 | - | 66.9 | 65.5 |
+| MMStar | 56.0 | 64.7 | 45.7 | 65.1 | 59.1 | 65.8 | 68.3 | 69.5 |
+| HallBench (avg.) | 46.5 | 55.0 | 37.8 | 55.5 | 45.6 | 49.0 | 58.1 | 57.4 |
+| MMHal (score) | - | 4.00 | - | - | - | - | - | 3.89 |
+| CRPE (relation) | - | 76.6 | - | - | - | - | - | 78.8 |
+| POPE (avg.) | - | 86.9 | - | - | - | - | - | 90.8 |
+

 ### Video Benchmarks

+| Model Name | Video-MME (wo / w sub) | MVBench | MMBench-Video (val) | MLVU (M-Avg) | LongVideoBench (val total) | CG-Bench v1.1 (long / clue acc.) |
+|---------------------------------------------|-------------|------|-------|-------|------|-------------|
+| **InternVL2.5-1B** | 50.3 / 52.3 | 64.3 | 1.36 | 57.3 | 47.9 | - |
+| Qwen2-VL-2B | 55.6 / 60.4 | 63.2 | - | - | - | - |
+| **InternVL2.5-2B** | 51.9 / 54.1 | 68.8 | 1.44 | 61.4 | 52.0 | - |
+| **InternVL2.5-4B** | 62.3 / 63.6 | 71.6 | 1.73 | 68.3 | 55.2 | - |
+| VideoChat2-HD | 45.3 / 55.7 | 62.3 | 1.22 | 47.9 | - | - |
+| MiniCPM-V-2.6 | 60.9 / 63.6 | - | 1.70 | - | 54.9 | - |
+| LLaVA-OneVision-7B | 58.2 / - | 56.7 | - | - | - | - |
+| Qwen2-VL-7B | 63.3 / 69.0 | 67.0 | 1.44 | - | 55.6 | - |
+| **InternVL2.5-8B** | 64.2 / 66.9 | 72.0 | 1.68 | 68.9 | 60.0 | - |
+| **InternVL2.5-26B** | 66.9 / 69.2 | 75.2 | 1.86 | 72.3 | 59.9 | - |
+| Oryx-1.5-32B | 67.3 / 74.9 | 70.1 | 1.52 | 72.3 | - | - |
+| VILA-1.5-40B | 60.1 / 61.1 | - | 1.61 | 56.7 | - | - |
+| **InternVL2.5-38B** | 70.7 / 73.1 | 74.4 | 1.82 | 75.3 | 63.3 | - |
+| GPT-4V/4T | 59.9 / 63.3 | 43.7 | 1.53 | 49.2 | 59.1 | - |
+| GPT-4o-20240513 | 71.9 / 77.2 | - | 1.63 | 64.6 | 66.7 | - |
+| GPT-4o-20240806 | - | - | 1.87 | - | - | - |
+| Gemini-1.5-Pro | 75.0 / 81.3 | - | 1.30 | - | 64.0 | - |
+| VideoLLaMA2-72B | 61.4 / 63.1 | 62.0 | - | - | - | - |
+| LLaVA-OneVision-72B | 66.2 / 69.5 | 59.4 | - | 66.4 | 61.3 | - |
+| Qwen2-VL-72B | 71.2 / 77.8 | 73.6 | 1.70 | - | - | 41.3 / 56.2 |
+| InternVL2-Llama3-76B | 64.7 / 67.8 | 69.6 | 1.71 | 69.9 | 61.1 | - |
+| **InternVL2.5-78B** | 72.1 / 74.0 | 76.4 | 1.97 | 75.7 | 63.6 | 42.2 / 58.5 |
+
 ### Multimodal Multilingual Understanding

 <table style="width:100%; border-collapse: collapse;">