{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 82.86221313476562,
"base_token_generation_latency_sync": 20.888434410095215,
"base_token_generation_latency_async": 20.86069267243147,
"base_token_generation_throughput_sync": 0.04787338200495811,
"base_token_generation_throughput_async": 0.04793704675595715,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 40.887705612182614,
"base_inference_latency_async": 17.732644081115723,
"base_inference_throughput_sync": 0.024457229502798192,
"base_inference_throughput_async": 0.05639316931110935,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 92.91635131835938,
"smashed_token_generation_latency_sync": 24.19665660858154,
"smashed_token_generation_latency_async": 24.469297379255295,
"smashed_token_generation_throughput_sync": 0.04132802379173914,
"smashed_token_generation_throughput_async": 0.040867540432435345,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 52.49914817810058,
"smashed_inference_latency_async": 26.189470291137695,
"smashed_inference_throughput_sync": 0.019047928103662803,
"smashed_inference_throughput_async": 0.038183284689740055,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}