{
    "base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
    "base_current_gpu_total_memory": 40339.3125,
    "base_perplexity": 8.891793251037598,
    "base_token_generation_latency_sync": 38.249277877807614,
    "base_token_generation_latency_async": 39.28140439093113,
    "base_token_generation_throughput_sync": 0.026144284427921292,
    "base_token_generation_throughput_async": 0.025457338287804936,
    "base_token_generation_CO2_emissions": null,
    "base_token_generation_energy_consumption": null,
    "base_inference_latency_sync": 116.94233551025391,
    "base_inference_latency_async": 38.52105140686035,
    "base_inference_throughput_sync": 0.008551223093302395,
    "base_inference_throughput_async": 0.025959831403301378,
    "base_inference_CO2_emissions": null,
    "base_inference_energy_consumption": null,
    "smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
    "smashed_current_gpu_total_memory": 40339.3125,
    "smashed_perplexity": 9.57605266571045,
    "smashed_token_generation_latency_sync": 63.63798370361328,
    "smashed_token_generation_latency_async": 64.45870269089937,
    "smashed_token_generation_throughput_sync": 0.015713885667047324,
    "smashed_token_generation_throughput_async": 0.015513808969990104,
    "smashed_token_generation_CO2_emissions": null,
    "smashed_token_generation_energy_consumption": null,
    "smashed_inference_latency_sync": 192.26040344238282,
    "smashed_inference_latency_async": 109.18171405792236,
    "smashed_inference_throughput_sync": 0.005201279005428089,
    "smashed_inference_throughput_async": 0.009159042872962103,
    "smashed_inference_CO2_emissions": null,
    "smashed_inference_energy_consumption": null
}
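
A minimal sketch of how these metrics might be consumed, assuming the JSON above is saved as results.json (the file name is an assumption, not fixed by this file). The throughput_* fields are the reciprocals of the matching latency_* fields (e.g. 1 / 38.249 ms ~= 0.02614), which suggests latencies are reported in milliseconds; the base_*/smashed_* pairs allow a direct latency and perplexity comparison between the original and the compressed model.

import json

# Load the benchmark results (path is an assumption; adjust to where
# this JSON actually lives in the repository).
with open("results.json") as f:
    metrics = json.load(f)

# Compare sync latencies for both benchmarked stages. Latencies appear
# to be in milliseconds; throughput values are their reciprocals.
for stage in ("token_generation", "inference"):
    base = metrics[f"base_{stage}_latency_sync"]
    smashed = metrics[f"smashed_{stage}_latency_sync"]
    print(f"{stage}: smashed sync latency is {smashed / base:.2f}x the base")

# Perplexity change from the original to the compressed model.
print(f"perplexity: {metrics['base_perplexity']:.3f} -> "
      f"{metrics['smashed_perplexity']:.3f}")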