[
    {
        "model": "google/gemma-2b",
        "commit": "730a440734e1fb47c903c17e3231dac18e3e5fd6",
        "config": "backend.cache_implementation=null,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.01984868812561035,
            "prefill.throughput.value": 352.6681438945098,
            "decode.latency.mean": 2.4443394775390628,
            "decode.throughput.value": 51.95677653083702,
            "per_token.latency.mean": 0.019322965086684397,
            "per_token.throughput.value": 51.75189188170234
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "730a440734e1fb47c903c17e3231dac18e3e5fd6",
        "config": "backend.cache_implementation=static,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.021841631889343262,
            "prefill.throughput.value": 320.48887351752165,
            "decode.latency.mean": 2.5444388427734377,
            "decode.throughput.value": 49.91277363993156,
            "per_token.latency.mean": 0.020118487505102346,
            "per_token.throughput.value": 49.705525812831866
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "730a440734e1fb47c903c17e3231dac18e3e5fd6",
        "config": "backend.cache_implementation=static,backend.torch_compile=True",
        "metrics": {
            "prefill.latency.mean": 0.014077072143554688,
            "prefill.throughput.value": 497.26249383505586,
            "decode.latency.mean": 1.574309814453125,
            "decode.throughput.value": 80.67027140024314,
            "per_token.latency.mean": 0.012449383215470749,
            "per_token.throughput.value": 80.32526452855174
        }
    }
]