{ "_name_or_path": "/pretrainedmodel", "architectures": [ "GPTJXForCausalLM" ], "auto_map": { "AutoConfig": "pretrained_config.GPTJXConfig", "AutoModelForCausalLM": "pretrained_model.GPTJXForCausalLM" }, "bias": false, "block_size": 1024, "dropout": 0.0, "model_type": "nanogpt-j", "n_embd": 768, "n_head": 12, "n_layer": 12, "torch_dtype": "float32", "transformers_version": "4.39.3", "vocab_size": 52050 }