{
    "learning_rate": 0.001,
    "weight_decay": 1e-05,
    "epochs": 20,
    "batch_size": 32,
    "embedding_dim": 64,
    "model_dir": "models",
    "hidden_layers": [
        256,
        128,
        64
    ],
    "dropout": 0.3,
    "early_stopping_patience": 2,
    "max_grad_norm": 1.0,
    "l1_lambda": 1e-05,
    "n_splits": 5
}
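
A minimal sketch of loading this config, assuming it is saved as config.json and read with Python's standard json module; the TrainConfig dataclass name is hypothetical, added only to give the fields types and defaults matching the values above.

```python
import json
from dataclasses import dataclass, field

@dataclass
class TrainConfig:
    # Defaults mirror the values in the JSON file above.
    learning_rate: float = 0.001
    weight_decay: float = 1e-05
    epochs: int = 20
    batch_size: int = 32
    embedding_dim: int = 64
    model_dir: str = "models"
    hidden_layers: list = field(default_factory=lambda: [256, 128, 64])
    dropout: float = 0.3
    early_stopping_patience: int = 2
    max_grad_norm: float = 1.0
    l1_lambda: float = 1e-05
    n_splits: int = 5

# "config.json" is an assumed filename; adjust to wherever the file lives.
with open("config.json") as f:
    cfg = TrainConfig(**json.load(f))

print(cfg.learning_rate, cfg.hidden_layers)
```

Using a dataclass rather than a raw dict is only a convenience: it gives attribute access (cfg.batch_size) and fails loudly if the JSON contains an unexpected key.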