abc-transformer / config.json
{
"architectures": [
"AbcTransformer"
],
"auto_map": {
"AutoModelForCausalLM": "transformers_model.AbcTransformer"
},
"block_size": 128,
"device": "cpu",
"dropout": 0.2,
"model_type": "abc-transformer",
"n_embd": 8,
"n_heads": 1,
"n_layers": 1,
"torch_dtype": "float32",
"transformers_version": "4.34.1",
"vocab_size": 113
}
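
Because "auto_map" routes AutoModelForCausalLM to a custom class (transformers_model.AbcTransformer) defined in the repo rather than in the transformers library, loading requires trust_remote_code=True. Below is a minimal loading sketch; the repo id "SergioNC/abc-transformer" is an assumption inferred from the page context, so adjust it to the actual Hub path.

from transformers import AutoConfig, AutoModelForCausalLM

# Hypothetical repo id, assumed from the page; replace with the real Hub path.
repo_id = "SergioNC/abc-transformer"

# The config exposes the fields shown above (block_size=128, n_embd=8, ...).
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
print(config.block_size, config.n_embd, config.n_heads, config.n_layers)

# trust_remote_code=True lets transformers import the custom AbcTransformer
# class referenced in "auto_map" instead of a built-in architecture.
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)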