kmfoda committed (verified) · Commit 4868714 · 1 Parent(s): 7873dd1

Run 3. Outer Step 12. Inner Step 374.

Files changed (3):
  1. config.json +7 -8
  2. inner_optimizer.pt +1 -1
  3. model.safetensors +1 -1
config.json CHANGED
@@ -268,20 +268,19 @@
     "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
   "block_list": [
-    5468756,
-    5468760,
-    5468764,
-    5468768,
-    5468773,
-    5468777,
-    5468781
+    5468786,
+    5468790,
+    5468795,
+    5468799,
+    5468803,
+    5468808
   ],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
-  "inner_step": 373,
+  "inner_step": 374,
   "inner_steps": 0,
   "last_allreduce_block": 5450451,
   "layer_norm_epsilon": 1e-05,
inner_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a57f10d6802201deb9ede2ab2b5df9fa3bb31ab46f37703537c8db3fd06f5cca
+oid sha256:a4b938c9caa9159c66b41ed883cd0dd6a40baa67de7421c73e0128d98057b517
 size 8081782026
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:32c117499b35d8b75dd865d5d5eebd111641a7d9ed68959bd20b305dfdd8b77c
+oid sha256:8348264daaa182070ae2d1eb633d825a38e35d2351a6603d073e1bb70e81db23
 size 4040701744
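
inner_optimizer.pt and model.safetensors are Git LFS pointer files: only the oid (the sha256 of the stored blob) changes, while the sizes stay identical, as expected for in-place updates of the optimizer state and model weights. A small, generic sketch of verifying downloaded artifacts against the pointer oids, assuming the files have already been fetched into the working directory (file names and oids as in this commit):

import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream a file and return its hex sha256 (what the LFS pointer's oid records)."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# Expected oids after this commit (taken from the pointer files above).
expected = {
    "inner_optimizer.pt": "a4b938c9caa9159c66b41ed883cd0dd6a40baa67de7421c73e0128d98057b517",
    "model.safetensors": "8348264daaa182070ae2d1eb633d825a38e35d2351a6603d073e1bb70e81db23",
}

for name, oid in expected.items():
    # Assumes the files were downloaded locally before running this check.
    assert sha256_of(name) == oid, f"{name} does not match its LFS pointer"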