{
"config": {
"name": "pytorch_generate",
"backend": {
"name": "pytorch",
"version": "2.4.0+cu121",
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
"task": "text-generation",
"library": "transformers",
"model_type": "gemma",
"model": "google/gemma-2b",
"processor": "google/gemma-2b",
"device": "cuda",
"device_ids": "0",
"seed": 42,
"inter_op_num_threads": null,
"intra_op_num_threads": null,
"model_kwargs": {},
"processor_kwargs": {},
"no_weights": true,
"device_map": null,
"torch_dtype": "float16",
"eval_mode": true,
"to_bettertransformer": false,
"low_cpu_mem_usage": null,
"attn_implementation": null,
"cache_implementation": "static",
"autocast_enabled": false,
"autocast_dtype": null,
"torch_compile": false,
"torch_compile_target": "forward",
"torch_compile_config": {
"backend": "inductor",
"mode": "reduce-overhead",
"fullgraph": true
},
"quantization_scheme": null,
"quantization_config": {},
"deepspeed_inference": false,
"deepspeed_inference_config": {},
"peft_type": null,
"peft_config": {}
},
"scenario": {
"name": "inference",
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
"iterations": 2,
"duration": 0,
"warmup_runs": 10,
"input_shapes": {
"batch_size": 1,
"num_choices": 2,
"sequence_length": 7
},
"new_tokens": null,
"memory": true,
"latency": true,
"energy": false,
"forward_kwargs": {},
"generate_kwargs": {
"max_new_tokens": 128,
"min_new_tokens": 128,
"do_sample": false
},
"call_kwargs": {}
},
"launcher": {
"name": "process",
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
"device_isolation": true,
"device_isolation_action": "warn",
"numactl": false,
"numactl_kwargs": {},
"start_method": "spawn"
},
"environment": {
"cpu": " AMD EPYC 7R32",
"cpu_count": 16,
"cpu_ram_mb": 66697.261056,
"system": "Linux",
"machine": "x86_64",
"platform": "Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.29",
"processor": "x86_64",
"python_version": "3.8.10",
"gpu": [
"NVIDIA A10G"
],
"gpu_count": 1,
"gpu_vram_mb": 24146608128,
"optimum_benchmark_version": "0.4.0",
"optimum_benchmark_commit": null,
"transformers_version": "4.45.0.dev0",
"transformers_commit": "ecd61c62862f925a18b4f063dc17fcaf01826e25",
"accelerate_version": "0.35.0.dev0",
"accelerate_commit": null,
"diffusers_version": null,
"diffusers_commit": null,
"optimum_version": "1.22.0.dev0",
"optimum_commit": null,
"timm_version": "0.9.16",
"timm_commit": null,
"peft_version": "0.12.1.dev0",
"peft_commit": null
}
},
"report": {
"load": {
"memory": {
"unit": "MB",
"max_ram": 1320.42752,
"max_global_vram": 6768.033792,
"max_process_vram": 0.0,
"max_reserved": 6138.363904,
"max_allocated": 6060.931072
},
"latency": {
"unit": "s",
"count": 1,
"total": 12.1005283203125,
"mean": 12.1005283203125,
"stdev": 0.0,
"p50": 12.1005283203125,
"p90": 12.1005283203125,
"p95": 12.1005283203125,
"p99": 12.1005283203125,
"values": [
12.1005283203125
]
},
"throughput": null,
"energy": null,
"efficiency": null
},
"prefill": {
"memory": {
"unit": "MB",
"max_ram": 1759.68256,
"max_global_vram": 6789.005312,
"max_process_vram": 0.0,
"max_reserved": 6142.558208,
"max_allocated": 5028.450816
},
"latency": {
"unit": "s",
"count": 2,
"total": 0.04429404830932617,
"mean": 0.022147024154663086,
"stdev": 1.246452331542941e-05,
"p50": 0.022147024154663086,
"p90": 0.022156995773315428,
"p95": 0.02215824222564697,
"p99": 0.022159239387512207,
"values": [
0.022134559631347656,
0.022159488677978515
]
},
"throughput": {
"unit": "tokens/s",
"value": 316.0695518781985
},
"energy": null,
"efficiency": null
},
"decode": {
"memory": {
"unit": "MB",
"max_ram": 1784.50432,
"max_global_vram": 6793.199616,
"max_process_vram": 0.0,
"max_reserved": 6146.752512,
"max_allocated": 5031.820288
},
"latency": {
"unit": "s",
"count": 2,
"total": 5.34977685546875,
"mean": 2.674888427734375,
"stdev": 0.01889965820312489,
"p50": 2.674888427734375,
"p90": 2.690008154296875,
"p95": 2.6918981201171874,
"p99": 2.6934100927734375,
"values": [
2.6937880859375,
2.65598876953125
]
},
"throughput": {
"unit": "tokens/s",
"value": 47.4786158118635
},
"energy": null,
"efficiency": null
},
"per_token": {
"memory": null,
"latency": {
"unit": "s",
"count": 254,
"total": 5.349518325805665,
"mean": 0.021061095770888442,
"stdev": 0.0005321484696243915,
"p50": 0.021248000144958496,
"p90": 0.021543628501892093,
"p95": 0.02170981273651123,
"p99": 0.022289735736846925,
"values": [
0.0204769287109375,
0.020527103424072265,
0.020510719299316405,
0.02047283172607422,
0.021644287109375,
0.021410816192626952,
0.021541887283325196,
0.020530176162719727,
0.020501504898071288,
0.020538368225097657,
0.02149171257019043,
0.02126131248474121,
0.02047590446472168,
0.02063974380493164,
0.021182464599609374,
0.020529151916503906,
0.021366783142089844,
0.021428224563598632,
0.02147635269165039,
0.021448703765869142,
0.02203647994995117,
0.021695487976074217,
0.020497407913208008,
0.020488191604614257,
0.020452352523803712,
0.02047385597229004,
0.02051584053039551,
0.02144972801208496,
0.020505599975585938,
0.02043903923034668,
0.02045952033996582,
0.022374399185180666,
0.0214783992767334,
0.02144767951965332,
0.021497856140136717,
0.02145792007446289,
0.02147225570678711,
0.021424127578735352,
0.02144256019592285,
0.02146816062927246,
0.02148044776916504,
0.021497856140136717,
0.02149580764770508,
0.021497856140136717,
0.021537792205810546,
0.02149990463256836,
0.02148454475402832,
0.021399551391601563,
0.021412864685058593,
0.021373952865600586,
0.02147123146057129,
0.021386240005493166,
0.021448703765869142,
0.02145996856689453,
0.021411840438842773,
0.02145792007446289,
0.021421056747436523,
0.021436416625976562,
0.021381120681762695,
0.02110054397583008,
0.021544960021972655,
0.0214466552734375,
0.021792768478393554,
0.021614591598510743,
0.02144256019592285,
0.02146201515197754,
0.021465087890625,
0.02166067123413086,
0.021526527404785157,
0.02143129539489746,
0.021367807388305664,
0.021440511703491212,
0.02144358444213867,
0.021126144409179686,
0.02143129539489746,
0.02145075225830078,
0.021440511703491212,
0.021507072448730468,
0.021543935775756837,
0.02146713638305664,
0.02145382308959961,
0.021567487716674806,
0.02150912094116211,
0.021391424179077148,
0.02141279983520508,
0.021583871841430666,
0.02149478340148926,
0.021441535949707033,
0.0214466552734375,
0.021420032501220702,
0.021555200576782226,
0.02150399971008301,
0.02142310333251953,
0.02112512016296387,
0.021166080474853514,
0.021135360717773437,
0.020295679092407228,
0.020249599456787108,
0.020312063217163084,
0.0202608642578125,
0.02028339195251465,
0.020354047775268554,
0.02038374328613281,
0.02026188850402832,
0.02025574493408203,
0.020575231552124023,
0.022740991592407226,
0.022490144729614258,
0.02173641586303711,
0.02061414337158203,
0.021212160110473634,
0.022214656829833986,
0.021737472534179687,
0.02119987106323242,
0.021744640350341796,
0.021545984268188476,
0.021517311096191406,
0.02147327995300293,
0.021537792205810546,
0.021366783142089844,
0.02127462387084961,
0.02042982482910156,
0.020405248641967775,
0.020405248641967775,
0.020435968399047853,
0.02045747184753418,
0.020463615417480468,
0.02041753578186035,
0.020397056579589845,
0.020764671325683593,
0.02145587158203125,
0.02142207908630371,
0.02149273681640625,
0.021436416625976562,
0.021501951217651367,
0.02142310333251953,
0.02146816062927246,
0.02184499168395996,
0.021646335601806642,
0.021505023956298826,
0.02147327995300293,
0.02141900825500488,
0.02143539237976074,
0.02142617607116699,
0.02049126434326172,
0.02046566390991211,
0.020468736648559572,
0.020455423355102538,
0.020487167358398437,
0.020479999542236327,
0.020533248901367186,
0.02044927978515625,
0.02027827262878418,
0.020376575469970702,
0.021029888153076173,
0.021120000839233398,
0.021089279174804687,
0.02122444725036621,
0.021355520248413085,
0.021918720245361328,
0.021326847076416015,
0.02148659133911133,
0.02149171257019043,
0.021300224304199217,
0.021542911529541017,
0.0214783992767334,
0.021153791427612305,
0.021406719207763672,
0.021444608688354492,
0.02045644760131836,
0.02046668815612793,
0.020376575469970702,
0.020271104812622072,
0.020470783233642577,
0.020385791778564453,
0.02027622413635254,
0.020304895401000975,
0.020477951049804686,
0.02045337677001953,
0.02052403259277344,
0.021368831634521485,
0.021398527145385742,
0.021383167266845703,
0.021441535949707033,
0.021385215759277345,
0.020273151397705077,
0.02030899238586426,
0.020925439834594727,
0.021234687805175782,
0.021184511184692383,
0.021488672256469728,
0.02154185676574707,
0.02143129539489746,
0.021132287979125978,
0.021147647857666017,
0.021139455795288087,
0.02128691291809082,
0.02122956848144531,
0.021133312225341795,
0.020313087463378905,
0.02085273551940918,
0.020739072799682616,
0.02149580764770508,
0.02146611213684082,
0.02026393508911133,
0.020289535522460937,
0.020281343460083007,
0.020722688674926756,
0.021786624908447266,
0.021776384353637695,
0.021613567352294923,
0.021569536209106444,
0.020520959854125977,
0.02044825553894043,
0.02047385597229004,
0.02049228858947754,
0.020458528518676758,
0.020427743911743165,
0.02043391990661621,
0.020418560028076172,
0.02043801689147949,
0.02047488021850586,
0.020418560028076172,
0.02043903923034668,
0.020419584274291993,
0.020393983840942383,
0.020239360809326173,
0.020237312316894532,
0.02023321533203125,
0.020249599456787108,
0.02024550437927246,
0.02047385597229004,
0.020419584274291993,
0.02045030403137207,
0.020220928192138672,
0.020505599975585938,
0.021345279693603517,
0.021424127578735352,
0.021147647857666017,
0.021146623611450196,
0.021173248291015623,
0.021191680908203125,
0.021120000839233398,
0.021168127059936523,
0.021112831115722656,
0.02110873603820801,
0.021136383056640624,
0.02026905632019043,
0.02024140739440918,
0.020362239837646484,
0.021352447509765626,
0.02127359962463379,
0.021167104721069335,
0.021130239486694336
]
},
"throughput": {
"unit": "tokens/s",
"value": 47.48091034191314
},
"energy": null,
"efficiency": null
}
}
}