{
    "base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
    "base_current_gpu_total_memory": 40339.3125,
    "base_token_generation_latency_sync": 84.122998046875,
    "base_token_generation_latency_async": 83.76936465501785,
    "base_token_generation_throughput_sync": 0.011887355696034278,
    "base_token_generation_throughput_async": 0.011937538312702235,
    "base_token_generation_CO2_emissions": 2.1414779832790826e-05,
    "base_token_generation_energy_consumption": 0.00677001766039976,
    "base_inference_latency_sync": 80.54784088134765,
    "base_inference_latency_async": 79.3264389038086,
    "base_inference_throughput_sync": 0.012414982066037831,
    "base_inference_throughput_async": 0.0126061375478181,
    "smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
    "smashed_current_gpu_total_memory": 40339.3125,
    "smashed_token_generation_latency_sync": 70.89783020019532,
    "smashed_token_generation_latency_async": 71.6143250465393,
    "smashed_token_generation_throughput_sync": 0.014104804014118406,
    "smashed_token_generation_throughput_async": 0.013963686725388247,
    "smashed_token_generation_CO2_emissions": 3.5712334115674065e-05,
    "smashed_token_generation_energy_consumption": 0.005783166932357252,
    "smashed_inference_latency_sync": 126.31818389892578,
    "smashed_inference_latency_async": 124.18899536132812,
    "smashed_inference_throughput_sync": 0.007916516602234844,
    "smashed_inference_throughput_async": 0.008052243253039434
}
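
The file pairs each metric for a base model with the same metric for a "smashed" variant, both measured on an NVIDIA A100-PCIE-40GB. A minimal sketch of how the file might be consumed, assuming it is saved locally as model_efficiency_metrics.json (the filename is hypothetical):

```python
import json

# Load the benchmark results above (hypothetical filename; point it at
# wherever this JSON file is stored locally).
with open("model_efficiency_metrics.json") as f:
    metrics = json.load(f)

# Ratio of base to smashed token-generation latency (synchronous mode).
# A ratio keeps the comparison unit-free, so no assumption about the
# latency unit is needed.
base_latency = metrics["base_token_generation_latency_sync"]
smashed_latency = metrics["smashed_token_generation_latency_sync"]
print(f"Token-generation speedup (sync): {base_latency / smashed_latency:.2f}x")
```

With the values recorded above this reports roughly a 1.19x token-generation speedup for the smashed variant; note that the smashed inference latency fields are higher than the base ones, so the same calculation on the inference metrics would show a slowdown instead.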