{ "learning_rate": 0.001, "weight_decay": 1e-05, "epochs": 20, "batch_size": 32, "embedding_dim": 64, "model_dir": "models", "hidden_layers": [ 256, 128, 64 ], "dropout": 0.3, "early_stopping_patience": 2, "max_grad_norm": 1.0, "l1_lambda": 1e-05, "n_splits": 5 }