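# Training configuration (assumed context: for the Ashaar Arabic diacritization model,
# duplicated from the arbml/Ashaar Space; field names follow that project's training code)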
session_name: base
data_directory: "data"
data_type: "CA_MSA"
log_directory: "log_dir"
base_model_path: "ashaar-from-scratch-with-spaces-no-tatweel-epochs-75"
load_training_data: true
load_test_data: false
load_validation_data: true
n_training_examples: null # null loads all training examples; set a number to load fewer for faster startup
n_test_examples: null # null loads all test examples
n_validation_examples: null # null loads all validation examples
test_file_name: "test.csv"
is_data_preprocessed: false # set to true if the data file is already organized as (original text | text | diacritics)
data_separator: '|' # required if the data is already preprocessed
diacritics_separator: '*' # required if the data is already preprocessed
text_encoder: ArabicEncoderWithStartSymbol
text_cleaner: valid_arabic_cleaners # a whitelist cleaner that keeps only Arabic letters, punctuation, and spaces
max_len: 600 # sentences longer than this are not used
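# TRAINING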
max_steps: 5000
learning_rate: 0.001
batch_size: 64
adam_beta1: 0.9
adam_beta2: 0.999
use_decay: true
weight_decay: 0.0
use_mixed_precision: false
optimizer_type: Adam
device: cuda
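# Base-model fine-tuning options (assumed from the field names: freeze controls freezing the
# pretrained base model, n_layer: -1 likely means all layers, use_lstm toggles an LSTM head)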
max_sen_len: 256
freeze: true
n_layer: -1
use_lstm: false
# LOGGING
evaluate_frequency: 50000000 # effectively disables periodic evaluation (larger than max_steps)
max_eval_batches: -1
evaluate_with_error_rates_frequency: 1000
n_predicted_text_tensorboard: 10 # number of predicted texts written to TensorBoard
model_save_frequency: 5000
train_plotting_frequency: 50000000 # No plotting for this model
n_steps_avg_losses: [100, 500, 1_000, 5_000] # window sizes (in steps) for the average losses printed to the command line
error_rates_n_batches: 10000 # limits the number of batches used when computing error rates (useful if the calculation is slow)
test_model_path: null # null loads the last saved model for testing
train_resume_model_path: null # null resumes training from the last saved model