marcelovidigal
committed on
Training in progress, epoch 11

Files changed:
- model.safetensors +1 -1
- wandb/debug-internal.log +0 -0
- wandb/run-20240924_172630-x9iddikd/files/output.log +1 -0
- wandb/run-20240924_172630-x9iddikd/files/wandb-summary.json +1 -1
- wandb/run-20240924_172630-x9iddikd/logs/debug-internal.log +0 -0
- wandb/run-20240924_172630-x9iddikd/run-x9iddikd.wandb +0 -0
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:49146c9cd541b88129d9ea09ad2c3a4b9a7a2ac95d7582414a1f4a01d681f002
 size 267832560
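Note: model.safetensors is tracked with git-lfs, so this commit only replaces the sha256 oid recorded in the pointer file. The snippet below is a minimal sketch (not part of this repository) for checking that a locally pulled model.safetensors matches the oid in the pointer; the local file path is an assumption.

# Minimal sketch: verify a git-lfs pointer's sha256 oid against the local file.
# Assumes model.safetensors has already been fetched via `git lfs pull`.
import hashlib

EXPECTED_OID = "49146c9cd541b88129d9ea09ad2c3a4b9a7a2ac95d7582414a1f4a01d681f002"

def sha256_of(path, chunk_size=1 << 20):
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

if __name__ == "__main__":
    digest = sha256_of("model.safetensors")
    print("match" if digest == EXPECTED_OID else f"mismatch: {digest}")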
wandb/debug-internal.log
CHANGED
The diff for this file is too large to render. See raw diff.
wandb/run-20240924_172630-x9iddikd/files/output.log
CHANGED
@@ -42,3 +42,4 @@ You should probably TRAIN this model on a down-stream task to be able to use it
 {'loss': 0.1013, 'grad_norm': 0.6478258371353149, 'learning_rate': 8.400000000000001e-06, 'epoch': 8.0}
 {'eval_loss': 0.4580109715461731, 'eval_accuracy': 0.91, 'eval_runtime': 38.2702, 'eval_samples_per_second': 26.13, 'eval_steps_per_second': 0.836, 'epoch': 8.0}
 {'eval_loss': 0.4977562129497528, 'eval_accuracy': 0.913, 'eval_runtime': 37.7002, 'eval_samples_per_second': 26.525, 'eval_steps_per_second': 0.849, 'epoch': 9.0}
+{'eval_loss': 0.4662289023399353, 'eval_accuracy': 0.92, 'eval_runtime': 38.4422, 'eval_samples_per_second': 26.013, 'eval_steps_per_second': 0.832, 'epoch': 10.0}
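Note: the appended line in output.log is a Python dict repr emitted once per epoch by the training loop. As a hypothetical convenience (not part of this commit), the sketch below shows one way to collect the per-epoch eval records from such a log; the file path and the filtering on 'eval_loss' are assumptions.

# Minimal sketch: collect per-epoch eval records from a Trainer-style output.log.
# Lines that are not dict literals (warnings, progress bars) are skipped.
import ast

def read_eval_history(path="output.log"):
    records = []
    with open(path, encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            if not (line.startswith("{") and line.endswith("}")):
                continue
            try:
                entry = ast.literal_eval(line)
            except (SyntaxError, ValueError):
                continue
            if isinstance(entry, dict) and "eval_loss" in entry:
                records.append(entry)
    return records

if __name__ == "__main__":
    for r in read_eval_history():
        print(r["epoch"], r["eval_loss"], r.get("eval_accuracy"))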
wandb/run-20240924_172630-x9iddikd/files/wandb-summary.json
CHANGED
@@ -1 +1 @@
|
|
1 |
-
{"eval/loss": 0.
|
|
|
1 |
+
{"eval/loss": 0.5506279468536377, "eval/accuracy": 0.901, "eval/runtime": 37.4907, "eval/samples_per_second": 26.673, "eval/steps_per_second": 0.854, "train/epoch": 11.0, "train/global_step": 1375, "_timestamp": 1727239149.997223, "_runtime": 29559.12431383133, "_step": 20, "train/loss": 0.1013, "train/grad_norm": 0.6478258371353149, "train/learning_rate": 8.400000000000001e-06, "train_runtime": 8026.8642, "train_samples_per_second": 2.492, "train_steps_per_second": 0.156, "total_flos": 2396475988298112.0, "train_loss": 0.11480112991333008}
|
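Note: wandb-summary.json holds a flat key/value snapshot of the run's latest metrics. A minimal sketch for inspecting it, assuming the run directory recorded in this commit is present locally:

# Minimal sketch: read the wandb run summary and print the latest eval metrics.
# The path below mirrors this run's files directory and is an assumption.
import json

with open("wandb/run-20240924_172630-x9iddikd/files/wandb-summary.json", encoding="utf-8") as f:
    summary = json.load(f)

print("epoch:", summary.get("train/epoch"))
print("eval/loss:", summary.get("eval/loss"))
print("eval/accuracy:", summary.get("eval/accuracy"))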
wandb/run-20240924_172630-x9iddikd/logs/debug-internal.log
CHANGED
The diff for this file is too large to render. See raw diff.
wandb/run-20240924_172630-x9iddikd/run-x9iddikd.wandb
CHANGED
Binary files a/wandb/run-20240924_172630-x9iddikd/run-x9iddikd.wandb and b/wandb/run-20240924_172630-x9iddikd/run-x9iddikd.wandb differ