{
  "dim": 4096,
  "ffn_dim_multiplier": 1.3,
  "fusion_interval": 4,
  "intermediate_dim": 14336,
  "multiple_of": 1024,
  "n_heads": 32,
  "n_kv_heads": 8,
  "n_layers": 32,
  "n_special_tokens": 8,
  "norm_eps": 1e-05,
  "rope_theta": 500000.0,
  "use_scaled_rope": true,
  "vision_chunk_size": 560,
  "vision_max_num_chunks": 4,
  "vocab_size": 128256,
  "vision_num_cross_attention_layers": 8
}
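The feed-forward size in this config is not independent: `intermediate_dim` follows from `dim`, `ffn_dim_multiplier`, and `multiple_of`. The sketch below shows one way to reproduce that derivation, using the SwiGLU sizing rule from Meta's Llama reference code; the `params.json` path is an assumption for illustration.

```python
# Minimal sketch (not the reference implementation): derive the FFN hidden
# size from the base width, multiplier, and rounding granularity.
import json


def ffn_hidden_dim(dim: int, ffn_dim_multiplier: float, multiple_of: int) -> int:
    """Feed-forward hidden size as used in Llama-style SwiGLU blocks."""
    hidden = 4 * dim                            # start from 4x the model width
    hidden = int(2 * hidden / 3)                # SwiGLU keeps roughly 2/3 of that
    hidden = int(ffn_dim_multiplier * hidden)   # apply the configured multiplier
    # round up to the nearest multiple of `multiple_of`
    return multiple_of * ((hidden + multiple_of - 1) // multiple_of)


with open("params.json") as f:                  # assumed filename for this config
    params = json.load(f)

derived = ffn_hidden_dim(
    params["dim"], params["ffn_dim_multiplier"], params["multiple_of"]
)
print(derived)                                  # 14336 for the values above
assert derived == params["intermediate_dim"]
```

With `dim = 4096`, the rule gives `int(2 * 16384 / 3) = 10922`, then `int(1.3 * 10922) = 14198`, which rounds up to the next multiple of 1024, i.e. 14336, matching `intermediate_dim`.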