{
  "architectures": [
    "AbcTransformer"
  ],
  "auto_map": {
    "AutoModelForCausalLM": "transformers_model.AbcTransformer"
  },
  "block_size": 128,
  "device": "cpu",
  "dropout": 0.2,
  "model_type": "abc-transformer",
  "n_embd": 8,
  "n_heads": 1,
  "n_layers": 1,
  "torch_dtype": "float32",
  "transformers_version": "4.34.1",
  "vocab_size": 113
}