pere committed on
Commit
b6d6c02
1 Parent(s): 45a29c7

Saving weights and logs of epoch 60

Browse files
config.json CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:f07f648665ff8cbcecaeb7fba7e51c4f6244b86d7c83085bfca11925b8cbb79f
3
- size 699
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f84d74dd7be6033a337c5857b9bbc37993375140addaf83691e24e181e9d4b5c
3
+ size 613
events.out.tfevents.1625842203.t1v-n-55481057-w-0.84882.3.v2 ADDED
Binary file (465 kB). View file
 
events.out.tfevents.1625843263.t1v-n-55481057-w-0.86544.3.v2 ADDED
Binary file (40 Bytes). View file
 
events.out.tfevents.1625843608.t1v-n-55481057-w-0.87879.3.v2 ADDED
Binary file (1.74 MB). View file
 
flax_model.msgpack CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:9ec75c0b5de5daae130fea6477aa7a279e3b0f7bc4c0441ce45e526b49e60b4d
3
  size 1200715307
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1672bb6f04da617591d445601d1fa273deedb722149ff324265c8f1ee4ace542
3
  size 1200715307
run_translation_t5_flax.py CHANGED
@@ -430,8 +430,15 @@ def main():
430
 
431
  # Setting padding="max_length" as we need fixed length inputs for jitted functions
432
  def preprocess_function(examples):
433
- inputs = examples[text_column]
434
- targets = examples[summary_column]
 
 
 
 
 
 
 
435
  inputs = [prefix + inp for inp in inputs]
436
  model_inputs = tokenizer(
437
  inputs, max_length=data_args.max_source_length, padding="max_length", truncation=True, return_tensors="np"
 
430
 
431
  # Setting padding="max_length" as we need fixed length inputs for jitted functions
432
  def preprocess_function(examples):
433
+ #Make it two way
434
+ #inputs = examples[text_column]
435
+ inputs = examples[text_column][0:int(len(examples)/2)]
436
+ inputs.extend(examples[summary_column][int(len(examples)/2):])
437
+
438
+ #targets = examples[summary_column]
439
+ targets = examples[summary_column][0:int(len(examples)/2)]
440
+ targets.extend(examples[text_column][int(len(examples)/2):])
441
+
442
  inputs = [prefix + inp for inp in inputs]
443
  model_inputs = tokenizer(
444
  inputs, max_length=data_args.max_source_length, padding="max_length", truncation=True, return_tensors="np"