[
{
"model": "google/gemma-2b",
"commit": "3f93fd06949f9eae58e50fd0c9b8e60be82643bc",
"config": "backend.cache_implementation=null,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.020061391830444336,
"prefill.throughput.value": 348.92893071242895,
"decode.latency.mean": 2.4978765869140624,
"decode.throughput.value": 50.84318443326253,
"per_token.latency.mean": 0.01974688067832012,
"per_token.throughput.value": 50.64090963480064
}
},
{
"model": "google/gemma-2b",
"commit": "3f93fd06949f9eae58e50fd0c9b8e60be82643bc",
"config": "backend.cache_implementation=static,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.02292073631286621,
"prefill.throughput.value": 305.40031107424136,
"decode.latency.mean": 2.63423046875,
"decode.throughput.value": 48.21142322458379,
"per_token.latency.mean": 0.020826120082568747,
"per_token.throughput.value": 48.01662508596548
}
},
{
"model": "google/gemma-2b",
"commit": "3f93fd06949f9eae58e50fd0c9b8e60be82643bc",
"config": "backend.cache_implementation=static,backend.torch_compile=True",
"metrics": {
"prefill.latency.mean": 0.014083136081695556,
"prefill.throughput.value": 497.0483817946057,
"decode.latency.mean": 1.5619984741210937,
"decode.throughput.value": 81.30609735163821,
"per_token.latency.mean": 0.012352333894360206,
"per_token.throughput.value": 80.95636084259164
}
}
]