Add files using upload-large-folder tool
- config.json +51 -0
- generation_config.json +7 -0
- merges.txt +0 -0
- model.safetensors +3 -0
- quant_log.csv +145 -0
- quantize_config.json +21 -0
- special_tokens_map.json +24 -0
- tokenizer.json +0 -0
- tokenizer_config.json +32 -0
- vocab.json +0 -0
config.json
ADDED
@@ -0,0 +1,51 @@
+{
+  "_remove_final_layer_norm": false,
+  "activation_dropout": 0.0,
+  "activation_function": "relu",
+  "architectures": [
+    "OPTForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 2,
+  "do_layer_norm_before": true,
+  "dropout": 0.1,
+  "enable_bias": true,
+  "eos_token_id": 2,
+  "ffn_dim": 8192,
+  "hidden_size": 2048,
+  "init_std": 0.02,
+  "layer_norm_elementwise_affine": true,
+  "layerdrop": 0.0,
+  "max_position_embeddings": 2048,
+  "model_type": "opt",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 24,
+  "pad_token_id": 1,
+  "prefix": "</s>",
+  "quantization_config": {
+    "bits": 4,
+    "checkpoint_format": "gptq",
+    "desc_act": false,
+    "group_size": 128,
+    "lm_head": false,
+    "meta": {
+      "damp_auto_increment": 0.0025,
+      "damp_percent": 0.01,
+      "mse": 0.0,
+      "quantizer": [
+        "gptqmodel:2.2.0"
+      ],
+      "static_groups": false,
+      "true_sequential": true,
+      "uri": "https://github.com/modelcloud/gptqmodel"
+    },
+    "pack_dtype": "int32",
+    "quant_method": "gptq",
+    "sym": true
+  },
+  "torch_dtype": "float16",
+  "transformers_version": "4.51.3",
+  "use_cache": true,
+  "vocab_size": 50272,
+  "word_embed_proj_dim": 2048
+}
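Note (not part of the commit): the config above describes a 24-layer, 2048-hidden OPT model (the facebook/opt-1.3b shape) with an embedded 4-bit GPTQ quantization_config, so a recent transformers build with a GPTQ backend installed (gptqmodel or optimum) should load it directly. A minimal sketch, where the repo path is a placeholder:

    # Hypothetical loading sketch; "path/to/this/repo" stands in for this repository.
    from transformers import AutoModelForCausalLM, AutoTokenizer

    repo = "path/to/this/repo"  # placeholder
    tok = AutoTokenizer.from_pretrained(repo)
    model = AutoModelForCausalLM.from_pretrained(repo, device_map="auto")
    inputs = tok("Hello, my name is", return_tensors="pt").to(model.device)
    print(tok.decode(model.generate(**inputs, max_new_tokens=20)[0]))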
generation_config.json
ADDED
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 2,
+  "eos_token_id": 2,
+  "pad_token_id": 1,
+  "transformers_version": "4.51.3"
+}
merges.txt
ADDED
The diff for this file is too large to render.
See raw diff
model.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ed124f61348da0cb4e0a7c799ec5329571a4befcade8d125a62a086d4e5e120b
+size 845033800
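Note (not part of the commit): model.safetensors is stored as a Git LFS pointer, so the diff only records the object's SHA-256 and size (~845 MB). After downloading the real file, it can be checked against the recorded digest with the standard library, along these lines (the local filename is an assumption):

    # Hypothetical integrity check for the downloaded LFS object.
    import hashlib

    EXPECTED = "ed124f61348da0cb4e0a7c799ec5329571a4befcade8d125a62a086d4e5e120b"
    h = hashlib.sha256()
    with open("model.safetensors", "rb") as f:            # path is an assumption
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            h.update(chunk)
    assert h.hexdigest() == EXPECTED, "checksum mismatch"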
quant_log.csv
ADDED
@@ -0,0 +1,145 @@
+layer,module,loss,samples,damp,time
+0,self_attn.k_proj,0.00353451,0.01000,0.969
+0,self_attn.v_proj,0.00058977,0.01000,0.974
+0,self_attn.q_proj,0.00354618,0.01000,0.978
+0,self_attn.out_proj,0.00000824,0.01000,0.970
+0,fc1,0.00833520,0.01000,1.004
+0,fc2,0.00010249,0.01000,4.179
+1,self_attn.k_proj,0.00306776,0.01000,0.981
+1,self_attn.v_proj,0.00038127,0.01000,0.983
+1,self_attn.q_proj,0.00334425,0.01000,0.977
+1,self_attn.out_proj,0.00000175,0.01000,0.984
+1,fc1,0.00991357,0.01000,1.006
+1,fc2,0.00011309,0.01000,4.198
+2,self_attn.k_proj,0.00421192,0.01000,1.006
+2,self_attn.v_proj,0.00065938,0.01000,1.016
+2,self_attn.q_proj,0.00423529,0.01000,1.001
+2,self_attn.out_proj,0.00000309,0.01000,0.978
+2,fc1,0.01081147,0.01000,0.995
+2,fc2,0.00007957,0.01000,4.203
+3,self_attn.k_proj,0.00461946,0.01000,0.982
+3,self_attn.v_proj,0.00091849,0.01000,0.968
+3,self_attn.q_proj,0.00513186,0.01000,0.985
+3,self_attn.out_proj,0.00000551,0.01000,0.970
+3,fc1,0.01119875,0.01000,1.003
+3,fc2,0.00016196,0.01000,4.206
+4,self_attn.k_proj,0.00732114,0.01000,0.975
+4,self_attn.v_proj,0.00129839,0.01000,0.987
+4,self_attn.q_proj,0.00789255,0.01000,0.991
+4,self_attn.out_proj,0.00001026,0.01000,0.971
+4,fc1,0.01312019,0.01000,1.002
+4,fc2,0.00010168,0.01000,4.180
+5,self_attn.k_proj,0.00943032,0.01000,0.995
+5,self_attn.v_proj,0.00169851,0.01000,0.983
+5,self_attn.q_proj,0.00934376,0.01000,0.988
+5,self_attn.out_proj,0.00001065,0.01000,0.976
+5,fc1,0.01450417,0.01000,0.995
+5,fc2,0.00010014,0.01000,4.242
+6,self_attn.k_proj,0.01406881,0.01000,0.979
+6,self_attn.v_proj,0.00209196,0.01000,1.001
+6,self_attn.q_proj,0.01253495,0.01000,0.997
+6,self_attn.out_proj,0.00002485,0.01000,0.986
+6,fc1,0.01773035,0.01000,0.989
+6,fc2,0.00012900,0.01000,4.185
+7,self_attn.k_proj,0.01532272,0.01000,1.006
+7,self_attn.v_proj,0.00260288,0.01000,1.008
+7,self_attn.q_proj,0.01319375,0.01000,0.989
+7,self_attn.out_proj,0.00003269,0.01000,0.982
+7,fc1,0.02224689,0.01000,1.003
+7,fc2,0.00018148,0.01000,4.185
+8,self_attn.k_proj,0.01737493,0.01000,1.003
+8,self_attn.v_proj,0.00307547,0.01000,0.988
+8,self_attn.q_proj,0.01380688,0.01000,1.017
+8,self_attn.out_proj,0.00004196,0.01000,0.998
+8,fc1,0.02693362,0.01000,0.998
+8,fc2,0.00022905,0.01000,4.197
+9,self_attn.k_proj,0.01874729,0.01000,0.995
+9,self_attn.v_proj,0.00358431,0.01000,0.974
+9,self_attn.q_proj,0.01549898,0.01000,0.986
+9,self_attn.out_proj,0.00006392,0.01000,0.976
+9,fc1,0.03071506,0.01000,1.002
+9,fc2,0.00029924,0.01000,4.202
+10,self_attn.k_proj,0.01883435,0.01000,0.986
+10,self_attn.v_proj,0.00462759,0.01000,0.981
+10,self_attn.q_proj,0.01545094,0.01000,1.009
+10,self_attn.out_proj,0.00007908,0.01000,0.976
+10,fc1,0.03539471,0.01000,1.005
+10,fc2,0.00047551,0.01000,4.169
+11,self_attn.k_proj,0.01965953,0.01000,0.976
+11,self_attn.v_proj,0.00583622,0.01000,1.063
+11,self_attn.q_proj,0.01612123,0.01000,0.979
+11,self_attn.out_proj,0.00011278,0.01000,0.987
+11,fc1,0.04005920,0.01000,1.000
+11,fc2,0.00065392,0.01000,4.175
+12,self_attn.k_proj,0.02262860,0.01000,0.982
+12,self_attn.v_proj,0.00656720,0.01000,0.983
+12,self_attn.q_proj,0.01734480,0.01000,1.003
+12,self_attn.out_proj,0.00019552,0.01000,0.971
+12,fc1,0.04258961,0.01000,0.996
+12,fc2,0.00086177,0.01000,4.204
+13,self_attn.k_proj,0.02449026,0.01000,1.010
+13,self_attn.v_proj,0.00719500,0.01000,0.972
+13,self_attn.q_proj,0.01807458,0.01000,0.984
+13,self_attn.out_proj,0.00020925,0.01000,0.986
+13,fc1,0.04750419,0.01000,1.048
+13,fc2,0.00113821,0.01000,4.242
+14,self_attn.k_proj,0.02323671,0.01000,0.980
+14,self_attn.v_proj,0.00916689,0.01000,0.992
+14,self_attn.q_proj,0.01773284,0.01000,0.984
+14,self_attn.out_proj,0.00023394,0.01000,0.986
+14,fc1,0.05279522,0.01000,1.008
+14,fc2,0.00154404,0.01000,4.219
+15,self_attn.k_proj,0.02133237,0.01000,0.974
+15,self_attn.v_proj,0.01157838,0.01000,0.985
+15,self_attn.q_proj,0.01792355,0.01000,0.985
+15,self_attn.out_proj,0.00021368,0.01000,0.977
+15,fc1,0.05596036,0.01000,0.992
+15,fc2,0.00193929,0.01000,4.221
+16,self_attn.k_proj,0.02109856,0.01000,0.973
+16,self_attn.v_proj,0.01237937,0.01000,1.001
+16,self_attn.q_proj,0.01584957,0.01000,0.985
+16,self_attn.out_proj,0.00034128,0.01000,0.977
+16,fc1,0.06065808,0.01000,0.993
+16,fc2,0.00244585,0.01000,4.189
+17,self_attn.k_proj,0.02028195,0.01000,0.977
+17,self_attn.v_proj,0.01342661,0.01000,0.991
+17,self_attn.q_proj,0.01595530,0.01000,0.993
+17,self_attn.out_proj,0.00040838,0.01000,0.981
+17,fc1,0.06696854,0.01000,0.992
+17,fc2,0.00311560,0.01000,4.158
+18,self_attn.k_proj,0.01867285,0.01000,0.974
+18,self_attn.v_proj,0.01530573,0.01000,0.976
+18,self_attn.q_proj,0.01553169,0.01000,0.966
+18,self_attn.out_proj,0.00042700,0.01000,0.964
+18,fc1,0.07043907,0.01000,1.026
+18,fc2,0.00372020,0.01000,4.228
+19,self_attn.k_proj,0.01878069,0.01000,0.981
+19,self_attn.v_proj,0.01981840,0.01000,0.984
+19,self_attn.q_proj,0.01559329,0.01000,1.002
+19,self_attn.out_proj,0.00065731,0.01000,0.973
+19,fc1,0.07642501,0.01000,0.999
+19,fc2,0.00461285,0.01000,4.212
+20,self_attn.k_proj,0.01907601,0.01000,0.976
+20,self_attn.v_proj,0.02192091,0.01000,0.999
+20,self_attn.q_proj,0.01519119,0.01000,0.988
+20,self_attn.out_proj,0.00074141,0.01000,0.981
+20,fc1,0.07924981,0.01000,0.996
+20,fc2,0.00593197,0.01000,4.169
+21,self_attn.k_proj,0.01771271,0.01000,0.975
+21,self_attn.v_proj,0.02231180,0.01000,0.977
+21,self_attn.q_proj,0.01639184,0.01000,0.989
+21,self_attn.out_proj,0.00067609,0.01000,0.972
+21,fc1,0.07996578,0.01000,0.996
+21,fc2,0.00687802,0.01000,4.204
+22,self_attn.k_proj,0.01819149,0.01000,0.976
+22,self_attn.v_proj,0.02417441,0.01000,0.978
+22,self_attn.q_proj,0.01968422,0.01000,0.975
+22,self_attn.out_proj,0.00073485,0.01000,0.996
+22,fc1,0.07849728,0.01000,0.997
+22,fc2,0.00727349,0.01000,4.153
+23,self_attn.k_proj,0.02505763,0.01000,0.992
+23,self_attn.v_proj,0.01610191,0.01000,0.979
+23,self_attn.q_proj,0.05779545,0.01000,0.980
+23,self_attn.out_proj,0.00132728,0.01000,0.974
+23,fc1,0.07213598,0.01000,1.002
+23,fc2,0.00554883,0.01000,4.210
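Note (not part of the commit): the header names six columns (layer,module,loss,samples,damp,time) but each row carries only five values, so the samples field appears to have been left unpopulated by the logger; any parser should map fields positionally. A small sketch, assuming rows are layer,module,loss,damp,time, that sums quantization loss per layer:

    # Hypothetical parsing sketch; assumes the 5-field row layout noted above.
    import csv
    from collections import defaultdict

    per_layer = defaultdict(float)
    with open("quant_log.csv", newline="") as f:
        reader = csv.reader(f)
        next(reader)  # skip the header row
        for layer, _module, loss, _damp, _secs in reader:
            per_layer[int(layer)] += float(loss)

    for layer, total in sorted(per_layer.items()):
        print(f"layer {layer:2d}: total quantization loss {total:.5f}")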
quantize_config.json
ADDED
@@ -0,0 +1,21 @@
+{
+  "bits": 4,
+  "group_size": 128,
+  "desc_act": false,
+  "sym": true,
+  "lm_head": false,
+  "quant_method": "gptq",
+  "checkpoint_format": "gptq",
+  "pack_dtype": "int32",
+  "meta": {
+    "quantizer": [
+      "gptqmodel:2.2.0"
+    ],
+    "uri": "https://github.com/modelcloud/gptqmodel",
+    "damp_percent": 0.01,
+    "damp_auto_increment": 0.0025,
+    "static_groups": false,
+    "true_sequential": true,
+    "mse": 0.0
+  }
+}
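Note (not part of the commit): the meta block records gptqmodel 2.2.0 as the quantizer, so a checkpoint with these settings could plausibly be reproduced along the following lines. This is only a sketch against the gptqmodel 2.x API; the base checkpoint and calibration data are assumptions, not recorded anywhere in this commit:

    # Hypothetical reproduction sketch; base model and calibration set are guesses.
    from gptqmodel import GPTQModel, QuantizeConfig

    cfg = QuantizeConfig(bits=4, group_size=128, desc_act=False, sym=True)
    model = GPTQModel.load("facebook/opt-1.3b", cfg)  # assumed base checkpoint
    calibration = ["some representative text ..."] * 128  # placeholder corpus
    model.quantize(calibration)
    model.save("opt-1.3b-gptq-4bit")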
special_tokens_map.json
ADDED
@@ -0,0 +1,24 @@
+{
+  "bos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<pad>",
+  "unk_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  }
+}
tokenizer.json
ADDED
The diff for this file is too large to render.
See raw diff
tokenizer_config.json
ADDED
@@ -0,0 +1,32 @@
+{
+  "add_bos_token": true,
+  "add_prefix_space": false,
+  "added_tokens_decoder": {
+    "1": {
+      "content": "<pad>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "</s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "errors": "replace",
+  "extra_special_tokens": {},
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<pad>",
+  "tokenizer_class": "GPT2TokenizerFast",
+  "unk_token": "</s>",
+  "_commit_hash": null
+}
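Note (not part of the commit): the tokenizer files declare a GPT2TokenizerFast (OPT's byte-level BPE) with </s> (id 2) serving as bos, eos, and unk, and <pad> (id 1) as padding. A quick sanity check with the standard transformers API, using a placeholder repo path:

    # Verifies the special-token wiring described above.
    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("path/to/this/repo")  # placeholder
    print(tok.bos_token, tok.bos_token_id)  # expected: </s> 2
    print(tok.pad_token, tok.pad_token_id)  # expected: <pad> 1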
vocab.json
ADDED
The diff for this file is too large to render.
See raw diff