xoakkeyy / config.json
{
  "model_name": "xoakkeyy-gan",
  "model_type": "dcgan",
  "generator": {
    "input_shape": [100],
    "hidden_layers": [
      {"units": 256, "activation": "relu"},
      {"units": 512, "activation": "relu"},
      {"units": 1024, "activation": "relu"}
    ],
    "output_shape": [64, 64, 3],
    "output_activation": "tanh"
  },
  "discriminator": {
    "input_shape": [64, 64, 3],
    "hidden_layers": [
      {"units": 512, "activation": "relu"},
      {"units": 256, "activation": "relu"}
    ],
    "output_shape": [1],
    "output_activation": "sigmoid"
  },
  "optimizer": {
    "generator": {
      "type": "Adam",
      "learning_rate": 0.0002,
      "beta_1": 0.5
    },
    "discriminator": {
      "type": "Adam",
      "learning_rate": 0.0002,
      "beta_1": 0.5
    }
  },
  "loss_functions": {
    "generator": "binary_crossentropy",
    "discriminator": "binary_crossentropy"
  }
}
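
The repository does not include loader code, so the following is only a minimal sketch of how this config could be mapped onto Keras models. It assumes a TensorFlow/Keras stack (suggested by the "Adam", "tanh", and "binary_crossentropy" names), a local file named config.json, and dense hidden layers for the "units" entries; the helper names build_generator and build_discriminator are hypothetical.

# Sketch only: builds dense generator/discriminator models from config.json.
# The file path, tf.keras usage, and helper names are assumptions, not part
# of the original repository.
import json
import tensorflow as tf
from tensorflow.keras import layers, models, optimizers

with open("config.json") as f:
    cfg = json.load(f)

def build_generator(spec):
    # Dense stack from "hidden_layers", then project and reshape to the image.
    inputs = tf.keras.Input(shape=tuple(spec["input_shape"]))
    x = inputs
    for layer in spec["hidden_layers"]:
        x = layers.Dense(layer["units"], activation=layer["activation"])(x)
    out_shape = tuple(spec["output_shape"])  # (64, 64, 3)
    x = layers.Dense(out_shape[0] * out_shape[1] * out_shape[2],
                     activation=spec["output_activation"])(x)
    x = layers.Reshape(out_shape)(x)
    return models.Model(inputs, x, name="generator")

def build_discriminator(spec):
    # Flatten the image, apply the dense stack, end with a single sigmoid unit.
    inputs = tf.keras.Input(shape=tuple(spec["input_shape"]))
    x = layers.Flatten()(inputs)
    for layer in spec["hidden_layers"]:
        x = layers.Dense(layer["units"], activation=layer["activation"])(x)
    x = layers.Dense(spec["output_shape"][0],
                     activation=spec["output_activation"])(x)
    return models.Model(inputs, x, name="discriminator")

generator = build_generator(cfg["generator"])
discriminator = build_discriminator(cfg["discriminator"])

# Optimizers and loss exactly as named in the config.
g_opt = optimizers.Adam(
    learning_rate=cfg["optimizer"]["generator"]["learning_rate"],
    beta_1=cfg["optimizer"]["generator"]["beta_1"])
d_opt = optimizers.Adam(
    learning_rate=cfg["optimizer"]["discriminator"]["learning_rate"],
    beta_1=cfg["optimizer"]["discriminator"]["beta_1"])
discriminator.compile(optimizer=d_opt,
                      loss=cfg["loss_functions"]["discriminator"])

The generator loss and g_opt would be applied inside whatever adversarial training loop the author uses (not shown in this repository), with the generator output in [-1, 1] because of the tanh activation.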