bibleGPT / config.json
{
  "architectures": [
    "GPT2LMHeadModel"
  ],
  "auto_map": {
    "AutoConfig": "GPT2Config",
    "AutoModel": "GPT2Model",
    "AutoModelForCausalLM": "GPT2LMHeadModel",
    "AutoModelForQuestionAnswering": "GPT2ForQuestionAnswering"
  },
  "batch_size": 12,
  "block_size": 100,
  "device": "cpu",
  "eval_interval": 250,
  "hidden_dropout_prob": 0.0,
  "hidden_size": 300,
  "learning_rate": 0.001,
  "max_iters": 6000,
  "num_attention_heads": 6,
  "num_hidden_layers": 6,
  "torch_dtype": "float16",
  "transformers_version": "4.33.2",
  "vocab_size": 1000
}
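
A minimal sketch of reading this config with the Hugging Face transformers library (the file pins "transformers_version": "4.33.2"). It assumes the JSON above is saved locally as config.json. Note that keys such as batch_size, block_size, device, eval_interval, learning_rate, and max_iters look like training hyperparameters stored alongside the model config; they are not standard GPT-2 fields and are simply kept as extra attributes.

from transformers import GPT2Config

# Parse the JSON above; unrecognized keys are retained as plain attributes.
config = GPT2Config.from_json_file("config.json")

# hidden_size / num_attention_heads / num_hidden_layers are aliases for
# GPT-2's n_embd / n_head / n_layer via the config's attribute_map.
print(config.vocab_size)           # 1000
print(config.hidden_size)          # 300
print(config.num_hidden_layers)    # 6
print(config.num_attention_heads)  # 6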