{
"current_gpu_type": "NVIDIA A100-PCIE-40GB",
"current_gpu_total_memory": 40339.3125,
"perplexity": 9.12706470489502,
"token_generation_latency_sync": 18.37441167831421,
"token_generation_latency_async": 17.59982258081436,
"token_generation_throughput_sync": 0.05442351121261841,
"token_generation_throughput_async": 0.056818754587339086,
"token_generation_CO2_emissions": null,
"token_generation_energy_consumption": null,
"inference_latency_sync": 187.6491271972656,
"inference_latency_async": 46.02806568145752,
"inference_throughput_sync": 0.005329094864101088,
"inference_throughput_async": 0.021725874967690673,
"inference_CO2_emissions": null,
"inference_energy_consumption": null
}