sharpenb's picture
Upload folder using huggingface_hub (#2)
19e7326 verified
raw
history · blame
1.7 kB
{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 13.74486255645752,
"base_token_generation_latency_sync": 36.7140287399292,
"base_token_generation_latency_async": 37.803952395915985,
"base_token_generation_throughput_sync": 0.027237544729391865,
"base_token_generation_throughput_async": 0.02645226058712399,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 122.47920608520508,
"base_inference_latency_async": 43.92876625061035,
"base_inference_throughput_sync": 0.008164651224995125,
"base_inference_throughput_async": 0.02276412668398366,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 205742.3125,
"smashed_token_generation_latency_sync": 169.95355529785155,
"smashed_token_generation_latency_async": 170.05142513662577,
"smashed_token_generation_throughput_sync": 0.005883960463477526,
"smashed_token_generation_throughput_async": 0.005880574062796369,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 268.46238861083987,
"smashed_inference_latency_async": 212.37447261810303,
"smashed_inference_throughput_sync": 0.003724916570900325,
"smashed_inference_throughput_async": 0.0047086638411492346,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}