{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 7.0558295249938965,
"base_token_generation_latency_sync": 33.31762771606445,
"base_token_generation_latency_async": 33.3708219230175,
"base_token_generation_throughput_sync": 0.030014141718674624,
"base_token_generation_throughput_async": 0.02996629817230395,
"base_token_generation_CO2_emissions": 7.696398767204138e-05,
"base_token_generation_energy_consumption": 0.00695694452693372,
"base_inference_latency_sync": 120.45148162841797,
"base_inference_latency_async": 39.07821178436279,
"base_inference_throughput_sync": 0.008302097960778186,
"base_inference_throughput_async": 0.025589707264961174,
"base_inference_CO2_emissions": 7.779396700861304e-05,
"base_inference_energy_consumption": 0.00021249855258619754,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 7.06295108795166,
"smashed_token_generation_latency_sync": 250.25841217041017,
"smashed_token_generation_latency_async": 250.2531660720706,
"smashed_token_generation_throughput_sync": 0.003995869674578864,
"smashed_token_generation_throughput_async": 0.0039959534406530115,
"smashed_token_generation_CO2_emissions": 0.00022890042868949087,
"smashed_token_generation_energy_consumption": 0.059824871149179795,
"smashed_inference_latency_sync": 345.61167602539064,
"smashed_inference_latency_async": 225.31085014343262,
"smashed_inference_throughput_sync": 0.00289342076488913,
"smashed_inference_throughput_async": 0.004438312666093982,
"smashed_inference_CO2_emissions": 0.00022977354680803487,
"smashed_inference_energy_consumption": 0.0006239773613275945
}
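
For quick side-by-side reading, below is a minimal Python sketch that loads this report and prints base-vs-smashed ratios for a few paired metrics. The file name is an assumption (the JSON above saved locally); the file does not state units, so the interpretation of the latency figures as milliseconds is a guess based on their magnitudes.

import json

# Assumption: the JSON report above is saved locally under this
# (hypothetical) file name.
REPORT_PATH = "model_efficiency_report.json"

with open(REPORT_PATH) as f:
    report = json.load(f)

# Compare base vs. smashed on paired metrics. Lower is better for
# perplexity and latency. Units are not stated in the report; the
# latency values appear to be milliseconds, but that is an assumption.
for metric in (
    "perplexity",
    "token_generation_latency_sync",
    "inference_latency_sync",
):
    base = report[f"base_{metric}"]
    smashed = report[f"smashed_{metric}"]
    print(
        f"{metric}: base={base:.3f}  smashed={smashed:.3f}  "
        f"smashed/base={smashed / base:.2f}x"
    )

The same pattern extends to any other paired key in the report (throughput, CO2 emissions, energy consumption): strip the "base_"/"smashed_" prefix and compare the two values.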