{
  "activation_dropout": 0.1,
  "attention_dropout": 0.0,
  "convpos_depth": 2,
  "convpos_groups": 16,
  "convpos_width": 31,
  "embedding_dim": 1280,
  "hidden_dropout": 0.0,
  "hidden_size": 1024,
  "intermediate_size": 4096,
  "n_feats": 80,
  "n_tokens": 10000,
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "sigma_min": 0.0001,
  "solver": "euler"
}