Upload data.jsonl with huggingface_hub
data.jsonl CHANGED (+1 -0)
@@ -558,3 +558,4 @@
 {"timestamp": "2024-02-16T21:27:05", "model name": "cardiffnlp/twitter-roberta-base-sentiment-latest", "dataset name": "ceval/ceval-exam", "run_id": "68ff0c55-56f8-4b97-8472-92c4cde815ad", "duration": 1.5117826461791992, "num_queries": 0, "emissions": 6.800615849067418e-05, "energy": 0.00018423144135, "region": "virginia", "gpu_count": 1, "gpu_model": "1 x NVIDIA A10G"}
 {"timestamp": "2024-02-16T21:27:16", "model name": "cardiffnlp/twitter-roberta-base-sentiment", "dataset name": "ceval/ceval-exam", "run_id": "a6b72a41-b93e-4945-b36a-37e286b176f3", "duration": 1.4811890125274658, "num_queries": 0, "emissions": 6.624720111059284e-05, "energy": 0.0001794663544726, "region": "virginia", "gpu_count": 1, "gpu_model": "1 x NVIDIA A10G"}
 {"timestamp": "2024-02-16T21:27:27", "model name": "CAMeL-Lab/bert-base-arabic-camelbert-da-sentiment", "dataset name": "ceval/ceval-exam", "run_id": "12fe0a30-e33c-400e-bb5e-c10a65f2afec", "duration": 1.4020962715148926, "num_queries": 0, "emissions": 6.632803619651675e-05, "energy": 0.0001796853399987, "region": "virginia", "gpu_count": 1, "gpu_model": "1 x NVIDIA A10G"}
+{"timestamp": "2024-02-16T21:27:36", "model name": "cross-encoder/ms-marco-MiniLM-L-6-v2", "dataset name": "ceval/ceval-exam", "run_id": "2dd47a43-d12f-472b-988c-7246a83b680d", "duration": 1.582749366760254, "num_queries": 0, "emissions": 5.532039805926678e-05, "energy": 0.0001498652018686, "region": "virginia", "gpu_count": 1, "gpu_model": "1 x NVIDIA A10G"}
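
For reference, a minimal sketch of how a commit like this can be produced with the huggingface_hub Python client. The repo ID and local path below are placeholders (the actual repository and script behind this upload are not shown on this page); upload_file with repo_type="dataset" and a commit_message is the standard call.

from huggingface_hub import HfApi

api = HfApi()
# Append-only JSONL log: upload the updated local file back to the dataset repo.
api.upload_file(
    path_or_fileobj="data.jsonl",            # local file (assumed path)
    path_in_repo="data.jsonl",               # destination path in the repo
    repo_id="<owner>/<dataset-name>",        # placeholder repo ID
    repo_type="dataset",
    commit_message="Upload data.jsonl with huggingface_hub",
)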