{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 8.405904769897461,
"base_token_generation_latency_sync": 40.136288452148435,
"base_token_generation_latency_async": 39.61505014449358,
"base_token_generation_throughput_sync": 0.02491510895912129,
"base_token_generation_throughput_async": 0.02524293157152543,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 120.26163177490234,
"base_inference_latency_async": 39.05525207519531,
"base_inference_throughput_sync": 0.008315203986852041,
"base_inference_throughput_async": 0.025604750881511217,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 111729.4453125,
"smashed_token_generation_latency_sync": 176.8026123046875,
"smashed_token_generation_latency_async": 178.42851113528013,
"smashed_token_generation_throughput_sync": 0.005656025026806052,
"smashed_token_generation_throughput_async": 0.005604485480696661,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 271.8605331420898,
"smashed_inference_latency_async": 233.8700771331787,
"smashed_inference_throughput_sync": 0.0036783566501627615,
"smashed_inference_throughput_async": 0.004275878352024249,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}