{
  "architectures": ["OpenHermesModel"],
  "model_type": "gpt",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "hidden_size": 1024,
  "vocab_size": 50257,
  "n_positions": 1024,
  "n_ctx": 1024,
  "activation_function": "gelu_new"
}