Add files using upload-large-folder tool

- config.json +51 -0
- generation_config.json +7 -0
- merges.txt +0 -0
- model.safetensors +3 -0
- quant_log.csv +145 -0
- quantize_config.json +21 -0
- special_tokens_map.json +24 -0
- tokenizer.json +0 -0
- tokenizer_config.json +32 -0
- vocab.json +0 -0
config.json
ADDED
@@ -0,0 +1,51 @@
+{
+  "_remove_final_layer_norm": false,
+  "activation_dropout": 0.0,
+  "activation_function": "relu",
+  "architectures": [
+    "OPTForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 2,
+  "do_layer_norm_before": true,
+  "dropout": 0.1,
+  "enable_bias": true,
+  "eos_token_id": 2,
+  "ffn_dim": 8192,
+  "hidden_size": 2048,
+  "init_std": 0.02,
+  "layer_norm_elementwise_affine": true,
+  "layerdrop": 0.0,
+  "max_position_embeddings": 2048,
+  "model_type": "opt",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 24,
+  "pad_token_id": 1,
+  "prefix": "</s>",
+  "quantization_config": {
+    "bits": 4,
+    "checkpoint_format": "gptq",
+    "desc_act": false,
+    "group_size": 128,
+    "lm_head": false,
+    "meta": {
+      "damp_auto_increment": 0.0025,
+      "damp_percent": 0.01,
+      "mse": 0.0,
+      "quantizer": [
+        "gptqmodel:2.2.0"
+      ],
+      "static_groups": false,
+      "true_sequential": true,
+      "uri": "https://github.com/modelcloud/gptqmodel"
+    },
+    "pack_dtype": "int32",
+    "quant_method": "gptq",
+    "sym": true
+  },
+  "torch_dtype": "float16",
+  "transformers_version": "4.51.3",
+  "use_cache": true,
+  "vocab_size": 50272,
+  "word_embed_proj_dim": 2048
+}
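The config above marks this checkpoint as a 4-bit GPTQ quantization of OPT (OPTForCausalLM, 24 layers, hidden size 2048, i.e. the 1.3B variant), produced with gptqmodel 2.2.0. A minimal loading sketch, assuming a transformers install with a GPTQ backend available and using a hypothetical local path for these files:

```python
# Minimal sketch: load the 4-bit GPTQ checkpoint with transformers.
# Assumes transformers>=4.51 plus a GPTQ backend (e.g. gptqmodel) is installed.
# MODEL_DIR is a placeholder for this repo's local path or hub id.
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_DIR = "./opt-1.3b-gptq"  # hypothetical path to these files

tokenizer = AutoTokenizer.from_pretrained(MODEL_DIR)
# The quantization_config block in config.json is detected automatically,
# so no extra quantization arguments are needed at load time.
model = AutoModelForCausalLM.from_pretrained(MODEL_DIR, device_map="auto")

inputs = tokenizer("Hello, my name is", return_tensors="pt").to(model.device)
out = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(out[0], skip_special_tokens=True))
```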
generation_config.json
ADDED
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 2,
+  "eos_token_id": 2,
+  "pad_token_id": 1,
+  "transformers_version": "4.51.3"
+}
merges.txt
ADDED
The diff for this file is too large to render.
See raw diff
model.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1fdc52cf6152a71be7ec16ab1b22c8af5892676eed314b2b59ecbf1222f690c6
+size 845033800
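This is a Git LFS pointer, not the weights themselves; the actual ~845 MB safetensors blob is fetched separately on checkout. A quick integrity check against the pointer's oid and size, assuming the blob has been downloaded to `model.safetensors`:

```python
# Sketch: verify a downloaded blob against its LFS pointer (sha256 oid + size).
import hashlib
import os

path = "model.safetensors"
expected_oid = "1fdc52cf6152a71be7ec16ab1b22c8af5892676eed314b2b59ecbf1222f690c6"
expected_size = 845033800

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert h.hexdigest() == expected_oid, "sha256 mismatch"
print("blob matches LFS pointer")
```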
quant_log.csv
ADDED
@@ -0,0 +1,145 @@
+layer,module,loss,samples,damp,time
+0,self_attn.k_proj,0.01193101,0.01000,0.766
+0,self_attn.v_proj,0.00204742,0.01000,0.744
+0,self_attn.q_proj,0.01151794,0.01000,0.751
+0,self_attn.out_proj,0.00000879,0.01000,0.750
+0,fc1,0.07220605,0.01000,0.777
+0,fc2,0.00009904,0.01000,3.377
+1,self_attn.k_proj,0.02748547,0.01000,0.773
+1,self_attn.v_proj,0.00377348,0.01000,0.771
+1,self_attn.q_proj,0.03053766,0.01000,0.799
+1,self_attn.out_proj,0.00000212,0.01000,0.788
+1,fc1,0.10564113,0.01000,0.766
+1,fc2,0.00011769,0.01000,3.389
+2,self_attn.k_proj,0.03364365,0.01000,0.774
+2,self_attn.v_proj,0.00638163,0.01000,0.770
+2,self_attn.q_proj,0.03404787,0.01000,0.763
+2,self_attn.out_proj,0.00000442,0.01000,0.761
+2,fc1,0.10685527,0.01000,0.783
+2,fc2,0.00007892,0.01000,3.447
+3,self_attn.k_proj,0.03226093,0.01000,0.772
+3,self_attn.v_proj,0.00772221,0.01000,0.755
+3,self_attn.q_proj,0.03518277,0.01000,0.763
+3,self_attn.out_proj,0.00000809,0.01000,0.790
+3,fc1,0.09162843,0.01000,0.769
+3,fc2,0.00007157,0.01000,3.342
+4,self_attn.k_proj,0.03976664,0.01000,0.771
+4,self_attn.v_proj,0.00833308,0.01000,0.770
+4,self_attn.q_proj,0.04298474,0.01000,0.770
+4,self_attn.out_proj,0.00001194,0.01000,0.785
+4,fc1,0.08744164,0.01000,0.789
+4,fc2,0.00008075,0.01000,3.376
+5,self_attn.k_proj,0.04211287,0.01000,0.773
+5,self_attn.v_proj,0.00969345,0.01000,0.766
+5,self_attn.q_proj,0.04678004,0.01000,0.765
+5,self_attn.out_proj,0.00001155,0.01000,0.764
+5,fc1,0.08947679,0.01000,0.773
+5,fc2,0.00009699,0.01000,3.370
+6,self_attn.k_proj,0.06050922,0.01000,0.801
+6,self_attn.v_proj,0.01090727,0.01000,0.750
+6,self_attn.q_proj,0.05891616,0.01000,0.755
+6,self_attn.out_proj,0.00003464,0.01000,0.763
+6,fc1,0.09683321,0.01000,0.768
+6,fc2,0.00013900,0.01000,3.376
+7,self_attn.k_proj,0.06143640,0.01000,0.792
+7,self_attn.v_proj,0.01166906,0.01000,0.763
+7,self_attn.q_proj,0.05640485,0.01000,0.786
+7,self_attn.out_proj,0.00004408,0.01000,0.763
+7,fc1,0.10895248,0.01000,0.778
+7,fc2,0.00020347,0.01000,3.344
+8,self_attn.k_proj,0.06507521,0.01000,0.768
+8,self_attn.v_proj,0.01208323,0.01000,0.777
+8,self_attn.q_proj,0.05399566,0.01000,0.760
+8,self_attn.out_proj,0.00005616,0.01000,0.763
+8,fc1,0.11881161,0.01000,0.782
+8,fc2,0.00026443,0.01000,3.333
+9,self_attn.k_proj,0.06506123,0.01000,0.786
+9,self_attn.v_proj,0.01248781,0.01000,0.765
+9,self_attn.q_proj,0.05453248,0.01000,0.783
+9,self_attn.out_proj,0.00007938,0.01000,0.764
+9,fc1,0.12274915,0.01000,0.769
+9,fc2,0.00035536,0.01000,3.326
+10,self_attn.k_proj,0.06140727,0.01000,0.768
+10,self_attn.v_proj,0.01420014,0.01000,0.767
+10,self_attn.q_proj,0.05016806,0.01000,0.763
+10,self_attn.out_proj,0.00011017,0.01000,0.760
+10,fc1,0.12526228,0.01000,0.774
+10,fc2,0.00056714,0.01000,3.383
+11,self_attn.k_proj,0.05852431,0.01000,0.759
+11,self_attn.v_proj,0.01596023,0.01000,0.767
+11,self_attn.q_proj,0.04750057,0.01000,0.764
+11,self_attn.out_proj,0.00014029,0.01000,0.774
+11,fc1,0.12785329,0.01000,0.775
+11,fc2,0.00075469,0.01000,3.320
+12,self_attn.k_proj,0.06211779,0.01000,0.773
+12,self_attn.v_proj,0.01650352,0.01000,0.763
+12,self_attn.q_proj,0.04717031,0.01000,0.777
+12,self_attn.out_proj,0.00022129,0.01000,0.761
+12,fc1,0.12406510,0.01000,0.769
+12,fc2,0.00102223,0.01000,3.390
+13,self_attn.k_proj,0.06443056,0.01000,0.781
+13,self_attn.v_proj,0.01726957,0.01000,0.772
+13,self_attn.q_proj,0.04661303,0.01000,0.758
+13,self_attn.out_proj,0.00023633,0.01000,0.765
+13,fc1,0.13054048,0.01000,0.792
+13,fc2,0.00139816,0.01000,3.390
+14,self_attn.k_proj,0.05847587,0.01000,0.771
+14,self_attn.v_proj,0.02090536,0.01000,0.770
+14,self_attn.q_proj,0.04376426,0.01000,0.788
+14,self_attn.out_proj,0.00027757,0.01000,0.761
+14,fc1,0.13830929,0.01000,0.779
+14,fc2,0.00192462,0.01000,3.343
+15,self_attn.k_proj,0.05246258,0.01000,0.773
+15,self_attn.v_proj,0.02610055,0.01000,0.773
+15,self_attn.q_proj,0.04327298,0.01000,0.758
+15,self_attn.out_proj,0.00027173,0.01000,0.767
+15,fc1,0.14475755,0.01000,0.775
+15,fc2,0.00244589,0.01000,3.321
+16,self_attn.k_proj,0.05219132,0.01000,0.786
+16,self_attn.v_proj,0.02799954,0.01000,0.762
+16,self_attn.q_proj,0.03783621,0.01000,0.763
+16,self_attn.out_proj,0.00039026,0.01000,0.769
+16,fc1,0.15572587,0.01000,0.782
+16,fc2,0.00316169,0.01000,3.376
+17,self_attn.k_proj,0.05054565,0.01000,0.830
+17,self_attn.v_proj,0.03031302,0.01000,0.771
+17,self_attn.q_proj,0.03752312,0.01000,0.761
+17,self_attn.out_proj,0.00044570,0.01000,0.765
+17,fc1,0.16843134,0.01000,0.772
+17,fc2,0.00403455,0.01000,3.315
+18,self_attn.k_proj,0.04542822,0.01000,0.766
+18,self_attn.v_proj,0.03442155,0.01000,0.753
+18,self_attn.q_proj,0.03605321,0.01000,0.759
+18,self_attn.out_proj,0.00034129,0.01000,0.769
+18,fc1,0.17829082,0.01000,0.769
+18,fc2,0.00384275,0.01000,3.513
+19,self_attn.k_proj,0.04639758,0.01000,0.772
+19,self_attn.v_proj,0.04432996,0.01000,0.771
+19,self_attn.q_proj,0.03613599,0.01000,0.770
+19,self_attn.out_proj,0.00052690,0.01000,0.781
+19,fc1,0.19145459,0.01000,0.782
+19,fc2,0.00470920,0.01000,3.507
+20,self_attn.k_proj,0.04799912,0.01000,0.791
+20,self_attn.v_proj,0.05039908,0.01000,0.762
+20,self_attn.q_proj,0.03520311,0.01000,0.781
+20,self_attn.out_proj,0.00063094,0.01000,0.774
+20,fc1,0.19941745,0.01000,0.783
+20,fc2,0.00606712,0.01000,3.514
+21,self_attn.k_proj,0.04461705,0.01000,0.797
+21,self_attn.v_proj,0.05256410,0.01000,0.784
+21,self_attn.q_proj,0.03900132,0.01000,0.821
+21,self_attn.out_proj,0.00054641,0.01000,0.773
+21,fc1,0.20300829,0.01000,0.783
+21,fc2,0.00714681,0.01000,3.591
+22,self_attn.k_proj,0.04705104,0.01000,0.829
+22,self_attn.v_proj,0.06090737,0.01000,0.750
+22,self_attn.q_proj,0.04897617,0.01000,0.805
+22,self_attn.out_proj,0.00065823,0.01000,0.755
+22,fc1,0.20262627,0.01000,0.766
+22,fc2,0.00769133,0.01000,3.481
+23,self_attn.k_proj,0.06682779,0.01000,0.775
+23,self_attn.v_proj,0.04476626,0.01000,0.787
+23,self_attn.q_proj,0.11386553,0.01000,0.775
+23,self_attn.out_proj,0.00203764,0.01000,0.774
+23,fc1,0.19634233,0.01000,0.810
+23,fc2,0.00576396,0.01000,3.536
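The log records one row per quantized module (24 layers × 6 modules = 144 rows): per-layer reconstruction loss, damping, and wall-clock seconds. Note that the header names six columns while each data row carries only five values; the damp values (0.01000) match damp_percent in the quantize config, so positional parsing presumably reads the fourth field as damp and the fifth as time, with the samples column left empty. A small summary sketch under that assumption:

```python
# Sketch: summarize quant_log.csv per module type.
# Assumes data rows are (layer, module, loss, damp, time); the "samples"
# header column appears to have no corresponding value in the rows.
import csv
from collections import defaultdict

losses, times = defaultdict(list), defaultdict(list)
with open("quant_log.csv") as f:
    reader = csv.reader(f)
    next(reader)  # skip the header row
    for layer, module, loss, damp, time in reader:
        losses[module].append(float(loss))
        times[module].append(float(time))

for module in sorted(losses):
    n = len(losses[module])
    print(f"{module:22s} n={n:3d} "
          f"mean_loss={sum(losses[module]) / n:.6f} "
          f"total_time={sum(times[module]):.1f}s")
```

One visible pattern: fc2 (the 8192→2048 down-projection) takes ~3.4 s per layer versus ~0.77 s for the other modules, dominating quantization runtime while keeping very low loss.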
quantize_config.json
ADDED
@@ -0,0 +1,21 @@
+{
+  "bits": 4,
+  "group_size": 128,
+  "desc_act": false,
+  "sym": true,
+  "lm_head": false,
+  "quant_method": "gptq",
+  "checkpoint_format": "gptq",
+  "pack_dtype": "int32",
+  "meta": {
+    "quantizer": [
+      "gptqmodel:2.2.0"
+    ],
+    "uri": "https://github.com/modelcloud/gptqmodel",
+    "damp_percent": 0.01,
+    "damp_auto_increment": 0.0025,
+    "static_groups": false,
+    "true_sequential": true,
+    "mse": 0.0
+  }
+}
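quantize_config.json mirrors the quantization_config block embedded in config.json and is what GPTQModel reads back when loading. A hedged sketch of how a checkpoint with these settings could be produced with the gptqmodel library; the base model id and calibration texts are illustrative choices, not taken from this repo, and the exact API may differ across gptqmodel versions:

```python
# Sketch: reproduce these quantize settings with gptqmodel (2.x-style API).
# "facebook/opt-1.3b" and the calibration list are illustrative assumptions.
from gptqmodel import GPTQModel, QuantizeConfig

quant_config = QuantizeConfig(
    bits=4,          # matches "bits": 4
    group_size=128,  # matches "group_size": 128
    desc_act=False,  # matches "desc_act": false
    sym=True,        # matches "sym": true
)

model = GPTQModel.load("facebook/opt-1.3b", quant_config)

# Calibration activations drive the per-module losses seen in quant_log.csv;
# a real run would use a few hundred varied text samples.
calibration = ["GPTQ calibrates each layer on sample activations."] * 128
model.quantize(calibration)
model.save("opt-1.3b-gptq")
```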
special_tokens_map.json
ADDED
@@ -0,0 +1,24 @@
+{
+  "bos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<pad>",
+  "unk_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  }
+}
tokenizer.json
ADDED
The diff for this file is too large to render.
See raw diff
tokenizer_config.json
ADDED
@@ -0,0 +1,32 @@
+{
+  "add_bos_token": true,
+  "add_prefix_space": false,
+  "added_tokens_decoder": {
+    "1": {
+      "content": "<pad>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "</s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "errors": "replace",
+  "extra_special_tokens": {},
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<pad>",
+  "tokenizer_class": "GPT2TokenizerFast",
+  "unk_token": "</s>",
+  "_commit_hash": null
+}
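The tokenizer files reproduce OPT's GPT-2 BPE setup, including the OPT convention that bos, eos, and unk all map to `</s>` (id 2) while pad is `<pad>` (id 1), consistent with bos_token_id, eos_token_id, and pad_token_id in config.json. A quick check, using the same hypothetical path as above:

```python
# Sketch: confirm the special-token wiring matches config.json.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./opt-1.3b-gptq")  # hypothetical path
print(tok.bos_token, tok.bos_token_id)  # </s> 2
print(tok.eos_token, tok.eos_token_id)  # </s> 2
print(tok.pad_token, tok.pad_token_id)  # <pad> 1
print(tok("hello").input_ids[0])        # add_bos_token: first id should be 2
```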
vocab.json
ADDED
The diff for this file is too large to render.
See raw diff