{
"_name_or_path": "LongSafari/hyenadna-small-32k-seqlen-hf",
"activation_freq": 10,
"architectures": [
"HyenaDNAForSequenceClassification"
],
"auto_map": {
"AutoConfig": "LongSafari/hyenadna-small-32k-seqlen-hf--configuration_hyena.HyenaConfig",
"AutoModel": "LongSafari/hyenadna-small-32k-seqlen-hf--modeling_hyena.HyenaDNAModel",
"AutoModelForCausalLM": "LongSafari/hyenadna-small-32k-seqlen-hf--modeling_hyena.HyenaDNAForCausalLM",
"AutoModelForSequenceClassification": "LongSafari/hyenadna-small-32k-seqlen-hf--modeling_hyena.HyenaDNAForSequenceClassification"
},
"d_inner": 1024,
"d_model": 256,
"emb_dim": 5,
"embed_dropout": 0.1,
"filter_order": 64,
"hyena_dropout": 0.0,
"hyena_filter_dropout": 0.0,
"hyena_order": 2,
"initializer_range": 0.02,
"layer_norm_epsilon": 1e-05,
"max_seq_len": 32770,
"model_type": "hyenadna",
"n_layer": 4,
"num_inner_mlps": 2,
"pad_token_id": 4,
"pad_vocab_size_multiple": 8,
"problem_type": "single_label_classification",
"short_filter_order": 3,
"tie_word_embeddings": false,
"torch_dtype": "float32",
"train_freq": true,
"transformers_version": "4.42.3",
"use_bias": true,
"vocab_size": 12
}