Upload llama_7b_seq8k_bs2M_skyladder_decay/0000050000/params.json with huggingface_hub
llama_7b_seq8k_bs2M_skyladder_decay/0000050000/params.json
ADDED
@@ -0,0 +1 @@
{"name": "llama_7b_seq8k_bs2M_skyladder_decay", "dump_dir": "lingua_local_dump", "seed": 777, "grad_acc_steps": 4, "gc_collect_freq": 1000, "probe_freq": 100, "steps": 50000, "data": {"root_dir": "/mnt/hdfs/tiktok_aiic/user/liuqian", "sources": {"dclm_pro": 1.0}, "batch_size": 1, "seq_len": 8192, "n_views": 2, "seed": 42, "add_bos": true, "add_eos": true, "load_async": true, "prefetch_size": 1024, "tokenizer": {"name": "sp", "path": "/opt/tiger/Github-Repo/lingua/tokenizers/llama2/tokenizer.model"}}, "optim": {"lr": 4e-05, "weight_decay": 0.01, "epsilon": 1e-08, "beta1": 0.9, "beta2": 0.95, "clip": 1.0, "scheduler": "cosine", "warmup": 100, "lr_min_ratio": 1.0, "cycle_length": 1.0, "cosine_theta": 1.0, "annealing_step": 1000, "decay_fraction": 0.1, "exp_factor": 0.5}, "model": {"dim": 4096, "n_layers": 32, "head_dim": null, "n_heads": 32, "n_kv_heads": null, "ffn_dim_multiplier": 1.0, "multiple_of": 256, "norm_eps": 1e-05, "rope_theta": 100000.0, "init_base_std": null, "init_std_factor": "disabled", "max_seqlen": 8192, "seed": 42, "vocab_size": 32000, "weight_tying": false, "sliding_window": null}, "distributed": {"dp_shard": 1, "dp_replicate": 64, "tp_size": 1, "selective_activation_checkpointing": false, "compile": true, "fsdp_type": "full_shard", "model_dtype": "bf16", "float8_recipe": null, "float8_filter": "layers\\.[0-9]+\\.", "matmul_allow_tf32": false, "detect_anomaly": false, "compile_cache_size_limit": 8, "spawn_method": "forkserver"}, "env": {"MKL_SERVICE_FORCE_INTEL": "GNU", "OMP_NUM_THREADS": "1", "MKL_NUM_THREADS": "1", "ENABLE_INTRA_NODE_COMM": "1", "TORCH_NCCL_AVOID_RECORD_STREAMS": "1", "NCCL_IB_TIMEOUT": "22", "NCCL_DEBUG": "INFO", "TORCH_NCCL_ASYNC_ERROR_HANDLING": "1"}, "checkpoint": {"dump": {"every": 10000, "keep": 0}, "eval": {"every": 1000, "keep": 0}, "path": "/mnt/hdfs/tiktok_aiic/user/liuqian/lingua_checkpoints/llama_7b_seq8k_bs2M_skyladder_decay", "init_ckpt_path": "/mnt/hdfs/tiktok_aiic/user/liuqian/lingua_checkpoints/llama_7b_seq8k_bs2M_skyladder/0000250000", "continue_training_from_init": false}, "profiling": {"run": true, "trace_folder": "profiling", "mem_warmup": 0, "mem_steps": 4, "profile_warmup": 100, "profile_steps": 4}, "logging": {"freq": 10, "acc_freq": null, "wandb": {"job_type": null, "dir": null, "project": "lingua", "entity": "SivilTaram", "tags": null, "group": null, "name": "llama_7b_seq8k_bs2M_skyladder_decay", "notes": null, "config_exclude_keys": null, "config_include_keys": null, "anonymous": null, "mode": null, "allow_val_change": null, "resume": null, "force": null, "tensorboard": null, "sync_tensorboard": null, "monitor_gym": null, "save_code": null, "id": null, "fork_from": null, "resume_from": null}}, "async_eval_gpus": null, "eval": null, "intradoc_mask": false, "doc_separator": 2, "dynamic_mask": false}
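For context, commits titled "Upload ... with huggingface_hub" are typically produced by huggingface_hub's upload_file, whose auto-generated commit message has exactly this form. A sketch of such a call follows; the repo_id and local source path are hypothetical, since neither appears on this page.

from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    # Local file to push (hypothetical path; the actual source is not shown here).
    path_or_fileobj="lingua_local_dump/llama_7b_seq8k_bs2M_skyladder_decay/0000050000/params.json",
    # Destination path inside the repo, as it appears in this commit.
    path_in_repo="llama_7b_seq8k_bs2M_skyladder_decay/0000050000/params.json",
    repo_id="user/repo",  # hypothetical; the target repository is not shown on this page
    repo_type="model",
)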