{ "architectures": [ "MyLLaMa" ], "auto_map": { "AutoConfig": "configure_for_hf.MyLLaMaConfig", "AutoModelForCausalLM": "configure_for_hf.MyLLaMa" }, "embed_dim": 1536, "model_type": "LLaMa", "n_chckpnt_segments": 24, "n_heads": 24, "n_layers": 24, "torch_dtype": "float32", "transformers_version": "4.47.0.dev0" }