sbmaruf committed
Commit 9269184 · Parent(s): 2bbee43

update model name

Files changed (1): README.md (+3 / -3)
README.md CHANGED

@@ -139,7 +139,7 @@ All models were evaluated using our proprietary evaluation pipeline and [LM Eval
 | Llama-3.1-8B-Instruct | 54 | 70.54 | 51.9 | 70.01 | 76.99 | 62.42 | 56.53 | 42.83 | 44.67 | 30.76 |
 | jais-family-13b-chat | 45.07 | 71.18 | 46.83 | 60.92 | 50.87 | 54.83 | 58.14 | 41.67 | 47.73 | 31.72 |
 | Mistral-Small-Instruct-2409 | 38.73 | 68.93 | 44.03 | 62.16 | 75.87 | 52.51 | 50.43 | 46.33 | 39.63 | 28.82 |
- | v27_dpo_model_7b-alpha-v1.27.2.25 | 51.58 | 76.33 | 66.81 | 91.54 | 85.57 | 73.9 | 67.78 | 65.5 | 55.91 | 44.53 |
+ | ALLaM-7B-Instruct-preview | 51.58 | 76.33 | 66.81 | 91.54 | 85.57 | 73.9 | 67.78 | 65.5 | 55.91 | 44.53 |
 
 #### English Benchmarks
 
@@ -163,7 +163,7 @@ All models were evaluated using our proprietary evaluation pipeline and [LM Eval
 | Llama-3.1-8B-Instruct | 42.38 | 55.12 | 27.01 | 66.69 | 73.88 | 79.28 | 70.08 | 41.16 | 34.14 | 67.97 | 54.05 | 42.7 | 57.55 | 75.82 |
 | jais-family-13b-chat | 30.31 | 47.87 | 25.89 | 65.91 | 65.04 | 75 | 35.82 | 24.4 | 18.92 | 51.91 | 40.57 | 20.52 | 31.89 | 64.59 |
 | Mistral-Small-Instruct-2409 | 40.76 | 60.49 | 25.89 | 72.27 | 78.53 | 85.35 | 79.11 | 47.47 | 39.42 | 69.42 | 56.35 | 58.23 | 68.35 | 81.43 |
- | v27_dpo_model_7b-alpha-v1.27.2.25 | 41.75 | 51.28 | 22.1 | 73.17 | 70.48 | 76.26 | 15.96 | 30.43 | 17.42 | 59.6 | 46.67 | 37.71 | 48.68 | 62.02 |
+ | ALLaM-7B-Instruct-preview | 41.75 | 51.28 | 22.1 | 73.17 | 70.48 | 76.26 | 15.96 | 30.43 | 17.42 | 59.6 | 46.67 | 37.71 | 48.68 | 62.02 |
 
 
 ### MT-bench
@@ -194,4 +194,4 @@ If you found this work helpful or used any part of this work, please include the
 primaryClass={cs.CL},
 url={https://arxiv.org/abs/2407.15390},
 }
- ```
+ ```
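
The hunk context above notes that the table scores come from a proprietary evaluation pipeline together with the LM Evaluation Harness. As a rough illustration only, the sketch below shows how comparable benchmark numbers could be collected with the harness's Python API; the `ALLaM-AI/ALLaM-7B-Instruct-preview` repo id, the task list, and the few-shot and batch settings are assumptions for illustration, not the exact configuration behind the table.

```python
# Minimal sketch: scoring a Hugging Face chat model with the LM Evaluation Harness.
# Assumptions (not taken from this commit): the repo id, the task subset, and the
# few-shot / batch settings below are illustrative only.
import lm_eval

results = lm_eval.simple_evaluate(
    model="hf",  # Hugging Face Transformers backend
    model_args="pretrained=ALLaM-AI/ALLaM-7B-Instruct-preview,dtype=bfloat16",
    tasks=["mmlu", "hellaswag", "arc_challenge"],  # illustrative subset of English benchmarks
    num_fewshot=0,
    batch_size=8,
)

# Per-task metrics are returned under results["results"].
for task, metrics in results["results"].items():
    print(task, metrics)
```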