Upload Phi3ForCausalLM
- config.json +14 -6
- model.safetensors +2 -2
config.json CHANGED

@@ -7,7 +7,7 @@
   "embd_pdrop": 0.0,
   "eos_token_id": 32000,
   "hidden_act": "silu",
-  "hidden_size":
+  "hidden_size": 64,
   "initializer_range": 0.02,
   "intermediate_size": 64,
   "max_position_embeddings": 131072,
@@ -24,14 +24,22 @@
     "long_factor": [
       1,
       2,
+      4,
       8,
-
+      16,
+      32,
+      64,
+      128
     ],
     "short_factor": [
-      1,
-
-
-
+      1.0,
+      1.25,
+      1.5,
+      1.75,
+      2.0,
+      2.25,
+      2.5,
+      2.75
     ],
     "type": "longrope"
   },
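The updated config describes a tiny 64-wide model whose longrope scaling factors are now fully populated. Below is a minimal sketch of loading it and inspecting those fields with transformers; the repo id "user/tiny-phi3" is a placeholder for wherever this checkpoint is hosted, and the head count in the comments is an assumption, since it does not appear in the hunks above.

# Minimal sketch: load the updated config and check the longrope fields.
# "user/tiny-phi3" is a placeholder repo id, not the actual repo.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("user/tiny-phi3")

print(config.hidden_size)                   # 64 after this commit
print(config.rope_scaling["type"])          # "longrope"
print(config.rope_scaling["long_factor"])   # [1, 2, 4, 8, 16, 32, 64, 128]
print(config.rope_scaling["short_factor"])  # [1.0, 1.25, ..., 2.75]

# Phi-3 expects one scaling factor per rotary dimension pair (head_dim // 2),
# so eight entries imply head_dim == 16 -- consistent with hidden_size 64
# and 4 attention heads (assumed; the head count is not shown in this diff).
assert len(config.rope_scaling["long_factor"]) == 8
assert len(config.rope_scaling["short_factor"]) == 8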
model.safetensors CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:313c04c2baa61b0ef2b637b2326233a0c37d3c6017cb6edeff3ff6d7330a8097
+size 16649024
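With a real digest and size in the pointer, a downloaded copy of the weights can be checked against it; at roughly 16.6 MB the file is consistent with a small test checkpoint rather than a full-size Phi-3 model. A quick sketch, again using the placeholder repo id "user/tiny-phi3":

# Verify a downloaded model.safetensors against the LFS pointer above.
# "user/tiny-phi3" is a placeholder repo id.
import hashlib
import os

from huggingface_hub import hf_hub_download

path = hf_hub_download("user/tiny-phi3", "model.safetensors")

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert digest.hexdigest() == "313c04c2baa61b0ef2b637b2326233a0c37d3c6017cb6edeff3ff6d7330a8097"
assert os.path.getsize(path) == 16649024  # size recorded in the pointer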