{ "architectures": [ "AfmoeForCausalLM" ], "n_layers": 1, "vocab_size": 200192, "hidden_size": 3, "num_attention_heads": 4 }