multimodalart committed
Commit c795693
1 Parent(s): d3e8e32

cosmetic param change

Files changed (1): app.py (+6 -3)
app.py CHANGED
@@ -254,9 +254,6 @@ def start_training(
         f"num_train_epochs={int(num_train_epochs)}",
         f"adam_weight_decay={adam_weight_decay}",
         f"adam_epsilon={adam_epsilon}",
-        f"prodigy_decouple={prodigy_decouple}",
-        f"prodigy_use_bias_correction={prodigy_use_bias_correction}",
-        f"prodigy_safeguard_warmup={prodigy_safeguard_warmup}",
         f"max_grad_norm={max_grad_norm}",
         f"lr_num_cycles={int(lr_num_cycles)}",
         f"lr_power={lr_power}",
@@ -282,6 +279,12 @@ def start_training(
     commands.append(f"snr_gamma={snr_gamma}")
     if scale_lr:
         commands.append("scale_lr")
+    if prodigy_decouple:
+        commands.append("prodigy_decouple")
+    if prodigy_use_bias_correction:
+        commands.append("prodigy_use_bias_correction")
+    if prodigy_safeguard_warmup:
+        commands.append("prodigy_safeguard_warmup")
     if with_prior_preservation:
         commands.append("with_prior_preservation")
         commands.append(f"class_prompt={class_prompt}")