{
"config": {
"name": "pytorch_generate",
"backend": {
"name": "pytorch",
"version": "2.4.0+cu121",
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
"task": "text-generation",
"library": "transformers",
"model_type": "gemma",
"model": "google/gemma-2b",
"processor": "google/gemma-2b",
"device": "cuda",
"device_ids": "0",
"seed": 42,
"inter_op_num_threads": null,
"intra_op_num_threads": null,
"model_kwargs": {},
"processor_kwargs": {},
"no_weights": true,
"device_map": null,
"torch_dtype": "float16",
"eval_mode": true,
"to_bettertransformer": false,
"low_cpu_mem_usage": null,
"attn_implementation": null,
"cache_implementation": "static",
"autocast_enabled": false,
"autocast_dtype": null,
"torch_compile": false,
"torch_compile_target": "forward",
"torch_compile_config": {
"backend": "inductor",
"mode": "reduce-overhead",
"fullgraph": true
},
"quantization_scheme": null,
"quantization_config": {},
"deepspeed_inference": false,
"deepspeed_inference_config": {},
"peft_type": null,
"peft_config": {}
},
"scenario": {
"name": "inference",
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
"iterations": 2,
"duration": 0,
"warmup_runs": 10,
"input_shapes": {
"batch_size": 1,
"num_choices": 2,
"sequence_length": 7
},
"new_tokens": null,
"memory": true,
"latency": true,
"energy": false,
"forward_kwargs": {},
"generate_kwargs": {
"max_new_tokens": 128,
"min_new_tokens": 128,
"do_sample": false
},
"call_kwargs": {}
},
"launcher": {
"name": "process",
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
"device_isolation": true,
"device_isolation_action": "warn",
"numactl": false,
"numactl_kwargs": {},
"start_method": "spawn"
},
"environment": {
"cpu": " AMD EPYC 7R32",
"cpu_count": 16,
"cpu_ram_mb": 66697.261056,
"system": "Linux",
"machine": "x86_64",
"platform": "Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.29",
"processor": "x86_64",
"python_version": "3.8.10",
"gpu": [
"NVIDIA A10G"
],
"gpu_count": 1,
"gpu_vram_mb": 24146608128,
"optimum_benchmark_version": "0.4.0",
"optimum_benchmark_commit": null,
"transformers_version": "4.45.0.dev0",
"transformers_commit": "f38590dade57c1f8cf8a67e9409dae8935f8c478",
"accelerate_version": "0.35.0.dev0",
"accelerate_commit": null,
"diffusers_version": null,
"diffusers_commit": null,
"optimum_version": "1.22.0.dev0",
"optimum_commit": null,
"timm_version": "0.9.16",
"timm_commit": null,
"peft_version": "0.12.1.dev0",
"peft_commit": null
}
},
"report": {
"load": {
"memory": {
"unit": "MB",
"max_ram": 1335.816192,
"max_global_vram": 6775.373824,
"max_process_vram": 0.0,
"max_reserved": 6138.363904,
"max_allocated": 6060.931072
},
"latency": {
"unit": "s",
"count": 1,
"total": 11.4623056640625,
"mean": 11.4623056640625,
"stdev": 0.0,
"p50": 11.4623056640625,
"p90": 11.4623056640625,
"p95": 11.4623056640625,
"p99": 11.4623056640625,
"values": [
11.4623056640625
]
},
"throughput": null,
"energy": null,
"efficiency": null
},
"prefill": {
"memory": {
"unit": "MB",
"max_ram": 1816.403968,
"max_global_vram": 6796.345344,
"max_process_vram": 0.0,
"max_reserved": 6142.558208,
"max_allocated": 5028.450816
},
"latency": {
"unit": "s",
"count": 2,
"total": 0.04521712112426758,
"mean": 0.02260856056213379,
"stdev": 0.0007233123779296861,
"p50": 0.02260856056213379,
"p90": 0.02318721046447754,
"p95": 0.023259541702270507,
"p99": 0.023317406692504882,
"values": [
0.023331872940063475,
0.021885248184204103
]
},
"throughput": {
"unit": "tokens/s",
"value": 309.6172346205902
},
"energy": null,
"efficiency": null
},
"decode": {
"memory": {
"unit": "MB",
"max_ram": 1842.81088,
"max_global_vram": 6800.539648,
"max_process_vram": 0.0,
"max_reserved": 6146.752512,
"max_allocated": 5031.820288
},
"latency": {
"unit": "s",
"count": 2,
"total": 5.410896728515625,
"mean": 2.7054483642578124,
"stdev": 0.001650756835937628,
"p50": 2.7054483642578124,
"p90": 2.7067689697265624,
"p95": 2.7069340454101565,
"p99": 2.7070661059570313,
"values": [
2.70709912109375,
2.703797607421875
]
},
"throughput": {
"unit": "tokens/s",
"value": 46.942311550950635
},
"energy": null,
"efficiency": null
},
"per_token": {
"memory": null,
"latency": {
"unit": "s",
"count": 254,
"total": 5.41062961959839,
"mean": 0.02130169141574169,
"stdev": 0.000305934813687003,
"p50": 0.021341184616088867,
"p90": 0.02147420234680176,
"p95": 0.021516901969909668,
"p99": 0.022151598472595214,
"values": [
0.021339136123657225,
0.021739519119262696,
0.021363712310791014,
0.02131455993652344,
0.021300224304199217,
0.02145894432067871,
0.021415935516357423,
0.021098495483398438,
0.02103910446166992,
0.02130636787414551,
0.02142310333251953,
0.021365760803222656,
0.021592063903808592,
0.021375999450683594,
0.021345279693603517,
0.021369855880737306,
0.021421056747436523,
0.02124492835998535,
0.021271551132202148,
0.021177343368530274,
0.021324800491333007,
0.021410816192626952,
0.021356544494628905,
0.021335039138793945,
0.021378047943115236,
0.021168127059936523,
0.019973119735717772,
0.02004684829711914,
0.020130815505981444,
0.02088960075378418,
0.02148044776916504,
0.0214783992767334,
0.021374975204467773,
0.021531648635864258,
0.021335039138793945,
0.021352447509765626,
0.021527551651000978,
0.021361663818359376,
0.021386240005493166,
0.021129215240478515,
0.021139455795288087,
0.021400575637817384,
0.021375999450683594,
0.021317632675170898,
0.02146201515197754,
0.021345279693603517,
0.02139958381652832,
0.02146505546569824,
0.021403648376464843,
0.022252544403076172,
0.02149990463256836,
0.021332992553710937,
0.021361663818359376,
0.02128179168701172,
0.021342208862304687,
0.021375999450683594,
0.021227519989013673,
0.021234687805175782,
0.021368831634521485,
0.021308416366577147,
0.021363712310791014,
0.021395456314086913,
0.021361663818359376,
0.021481472015380858,
0.021134336471557616,
0.021308416366577147,
0.021327871322631836,
0.021324800491333007,
0.021317632675170898,
0.021311487197875977,
0.02148454475402832,
0.021332992553710937,
0.021345279693603517,
0.021318687438964843,
0.021431264877319337,
0.021340160369873046,
0.021493759155273438,
0.021378047943115236,
0.02129408073425293,
0.0213309440612793,
0.021387264251708983,
0.021304319381713867,
0.021377023696899415,
0.021498880386352538,
0.02141900825500488,
0.021363712310791014,
0.0214783992767334,
0.02129408073425293,
0.021354496002197267,
0.02146918487548828,
0.021375999450683594,
0.021339136123657225,
0.02130636787414551,
0.021328895568847657,
0.021379072189331053,
0.02188287925720215,
0.021613567352294923,
0.021420032501220702,
0.021389312744140625,
0.021335039138793945,
0.021361663818359376,
0.021323776245117186,
0.02144256019592285,
0.021363712310791014,
0.02127769660949707,
0.02126848030090332,
0.021359615325927735,
0.021309440612792968,
0.02126438331604004,
0.02137913513183594,
0.02017990493774414,
0.021375999450683594,
0.021325824737548828,
0.021319679260253906,
0.02246963119506836,
0.021511167526245118,
0.02130636787414551,
0.021238784790039062,
0.021313535690307618,
0.020242431640625,
0.02030899238586426,
0.02127359962463379,
0.021279743194580078,
0.021372928619384765,
0.021222400665283202,
0.021331968307495116,
0.021320703506469727,
0.021380096435546874,
0.02128691291809082,
0.02143129539489746,
0.021395456314086913,
0.021275648117065428,
0.02127052879333496,
0.021378047943115236,
0.020922367095947265,
0.021295103073120117,
0.021328895568847657,
0.021238784790039062,
0.0212674560546875,
0.021369855880737306,
0.021226495742797852,
0.021391359329223633,
0.021398527145385742,
0.021320703506469727,
0.02131455993652344,
0.021361663818359376,
0.02132275199890137,
0.021325824737548828,
0.021402624130249022,
0.021331968307495116,
0.021385215759277345,
0.021319679260253906,
0.02125721549987793,
0.021285888671875,
0.021341184616088867,
0.021403648376464843,
0.02126643180847168,
0.021444608688354492,
0.021327871322631836,
0.021351423263549805,
0.021414911270141602,
0.02128895950317383,
0.02129715156555176,
0.021392383575439454,
0.021246976852416992,
0.020117504119873047,
0.020996095657348633,
0.02128179168701172,
0.02127052879333496,
0.021507072448730468,
0.02125823974609375,
0.021301248550415038,
0.02125619125366211,
0.021275648117065428,
0.02124595260620117,
0.02129100799560547,
0.0212992000579834,
0.02129715156555176,
0.02127769660949707,
0.02129408073425293,
0.021285888671875,
0.021341184616088867,
0.02127257537841797,
0.021390335083007812,
0.021399551391601563,
0.02130636787414551,
0.021720064163208007,
0.021575679779052736,
0.02148863983154297,
0.021362688064575194,
0.021335039138793945,
0.021367807388305664,
0.021391359329223633,
0.021347328186035155,
0.021334016799926758,
0.021319679260253906,
0.021369855880737306,
0.021344255447387696,
0.0212674560546875,
0.021377023696899415,
0.021379072189331053,
0.02206208038330078,
0.022304767608642577,
0.02147635269165039,
0.021405696868896484,
0.02142310333251953,
0.021308416366577147,
0.0212992000579834,
0.021384191513061524,
0.020121599197387697,
0.020192256927490236,
0.02081996726989746,
0.02011136054992676,
0.020116479873657226,
0.020560895919799805,
0.021334016799926758,
0.021394432067871092,
0.021175296783447265,
0.02128486442565918,
0.021331968307495116,
0.021390335083007812,
0.021358591079711914,
0.021303295135498047,
0.021402624130249022,
0.021404672622680664,
0.020973567962646485,
0.021375999450683594,
0.021325824737548828,
0.02132275199890137,
0.021424127578735352,
0.021337087631225587,
0.021347328186035155,
0.021424127578735352,
0.021359615325927735,
0.021353471755981446,
0.0214783992767334,
0.021375999450683594,
0.021365760803222656,
0.021355520248413085,
0.02142310333251953,
0.021362688064575194,
0.021404672622680664,
0.021316608428955077,
0.021320703506469727,
0.02167193603515625,
0.021320703506469727,
0.021350400924682617,
0.021432319641113282,
0.02129715156555176,
0.020349952697753908,
0.021392383575439454,
0.021369855880737306,
0.0212807674407959,
0.021377023696899415
]
},
"throughput": {
"unit": "tokens/s",
"value": 46.94462897256188
},
"energy": null,
"efficiency": null
}
}
}
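
A minimal sketch of how a report like the one above could be inspected programmatically, assuming the JSON is saved locally as benchmark_report.json (the filename and the printed fields are illustrative assumptions, not part of the original artifact):

import json

# Load the optimum-benchmark artifact (the filename here is an assumption).
with open("benchmark_report.json") as f:
    data = json.load(f)

config = data["config"]
report = data["report"]

# Summarize the setup recorded in the config section.
print(f"model: {config['backend']['model']} ({config['backend']['torch_dtype']})")
print(f"gpu: {', '.join(config['environment']['gpu'])}")

# Print mean latency and throughput for each stage of the report;
# stages such as "load" have no throughput, so that field may be null.
for stage in ("load", "prefill", "decode", "per_token"):
    section = report[stage]
    latency = section["latency"]
    throughput = section.get("throughput")
    line = f"{stage:>9}: mean latency {latency['mean']:.6f} {latency['unit']}"
    if throughput is not None:
        line += f", throughput {throughput['value']:.2f} {throughput['unit']}"
    print(line)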