{
"emb": null,
"hidden_dim": 4096,
"is_qa_task": false,
"ltm_context": 100,
"mask_size": 990,
"max_n_segments": 6,
"n_cell_out": 8,
"segment_alignment": null,
"segment_size": 990,
"word_emb_dim": 2048,
"model_type": "llama"
}