{
    "config": {
        "name": "pytorch_generate",
        "backend": {
            "name": "pytorch",
            "version": "2.4.0+cu121",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-generation",
            "library": "transformers",
            "model_type": "gemma",
            "model": "google/gemma-2b",
            "processor": "google/gemma-2b",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": "float16",
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": "static",
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {
                "backend": "inductor",
                "mode": "reduce-overhead",
                "fullgraph": true
            },
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 2,
            "duration": 0,
            "warmup_runs": 10,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 7
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 128,
                "min_new_tokens": 128,
                "do_sample": false
            },
            "call_kwargs": {}
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "warn",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7R32",
            "cpu_count": 16,
            "cpu_ram_mb": 66697.261056,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.10.223-211.872.amzn2.x86_64-x86_64-with-glibc2.29",
            "processor": "x86_64",
            "python_version": "3.8.10",
            "gpu": [
                "NVIDIA A10G"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 24146608128,
            "optimum_benchmark_version": "0.4.0",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.45.0.dev0",
            "transformers_commit": "0a7af19f4dc868bafc82f35eb7e8d13bac87a594",
            "accelerate_version": "0.34.0.dev0",
            "accelerate_commit": null,
            "diffusers_version": null,
            "diffusers_commit": null,
            "optimum_version": "1.22.0.dev0",
            "optimum_commit": null,
            "timm_version": "0.9.16",
            "timm_commit": null,
            "peft_version": "0.12.1.dev0",
            "peft_commit": null
        }
    },
    "report": {
        "load": {
            "memory": {
                "unit": "MB",
                "max_ram": 1319.657472,
                "max_global_vram": 6768.033792,
                "max_process_vram": 0.0,
                "max_reserved": 6138.363904,
                "max_allocated": 6060.931072
            },
            "latency": {
                "unit": "s",
                "count": 1,
                "total": 12.466306640625,
                "mean": 12.466306640625,
                "stdev": 0.0,
                "p50": 12.466306640625,
                "p90": 12.466306640625,
                "p95": 12.466306640625,
                "p99": 12.466306640625,
                "values": [
                    12.466306640625
                ]
            },
            "throughput": null,
            "energy": null,
            "efficiency": null
        },
        "prefill": {
            "memory": {
                "unit": "MB",
                "max_ram": 1751.515136,
                "max_global_vram": 6789.005312,
                "max_process_vram": 0.0,
                "max_reserved": 6142.558208,
                "max_allocated": 5028.450816
            },
            "latency": {
                "unit": "s",
                "count": 2,
                "total": 0.04397520065307617,
                "mean": 0.021987600326538084,
                "stdev": 6.532859802245959e-05,
                "p50": 0.021987600326538084,
                "p90": 0.022039863204956055,
                "p95": 0.0220463960647583,
                "p99": 0.022051622352600095,
                "values": [
                    0.021922271728515626,
                    0.022052928924560546
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 318.36125343570586
            },
            "energy": null,
            "efficiency": null
        },
        "decode": {
            "memory": {
                "unit": "MB",
                "max_ram": 1776.451584,
                "max_global_vram": 6793.199616,
                "max_process_vram": 0.0,
                "max_reserved": 6146.752512,
                "max_allocated": 5031.820288
            },
            "latency": {
                "unit": "s",
                "count": 2,
                "total": 5.263006591796875,
                "mean": 2.6315032958984377,
                "stdev": 0.007253051757812434,
                "p50": 2.6315032958984377,
                "p90": 2.6373057373046875,
                "p95": 2.6380310424804687,
                "p99": 2.6386112866210936,
                "values": [
                    2.63875634765625,
                    2.624250244140625
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 48.26138739706201
            },
            "energy": null,
            "efficiency": null
        },
        "per_token": {
            "memory": null,
            "latency": {
                "unit": "s",
                "count": 254,
                "total": 5.262742538452147,
                "mean": 0.020719458812803734,
                "stdev": 0.00044267072336821644,
                "p50": 0.020717055320739745,
                "p90": 0.02118236083984375,
                "p95": 0.021277439880371094,
                "p99": 0.021831383533477782,
                "values": [
                    0.02032640075683594,
                    0.02064384078979492,
                    0.021275648117065428,
                    0.021175296783447265,
                    0.021210111618041993,
                    0.021155839920043946,
                    0.021308416366577147,
                    0.02120806312561035,
                    0.021172224044799806,
                    0.021151744842529296,
                    0.021117952346801756,
                    0.021207040786743164,
                    0.021163007736206055,
                    0.020986879348754883,
                    0.0210882568359375,
                    0.021126144409179686,
                    0.02110361671447754,
                    0.02109542465209961,
                    0.021130239486694336,
                    0.021177343368530274,
                    0.02109132766723633,
                    0.02107904052734375,
                    0.021146623611450196,
                    0.021114879608154297,
                    0.02084864044189453,
                    0.02122854423522949,
                    0.02161664009094238,
                    0.021186559677124024,
                    0.02108006477355957,
                    0.021038080215454103,
                    0.021121023178100586,
                    0.020273151397705077,
                    0.020306943893432617,
                    0.020307968139648438,
                    0.02030899238586426,
                    0.020144128799438478,
                    0.020108287811279296,
                    0.020162559509277343,
                    0.02011238479614258,
                    0.020091903686523437,
                    0.020340736389160157,
                    0.02065203285217285,
                    0.02030899238586426,
                    0.021086208343505858,
                    0.0210882568359375,
                    0.021109760284423826,
                    0.021090303421020508,
                    0.02027622413635254,
                    0.02026905632019043,
                    0.02087321662902832,
                    0.021164031982421876,
                    0.02124083137512207,
                    0.021137407302856445,
                    0.02027008056640625,
                    0.02028441619873047,
                    0.020445184707641603,
                    0.020993024826049804,
                    0.022408191680908202,
                    0.022157312393188477,
                    0.0212807674407959,
                    0.02112512016296387,
                    0.02105855941772461,
                    0.020356096267700196,
                    0.020295743942260743,
                    0.020277183532714845,
                    0.020288511276245116,
                    0.0206059513092041,
                    0.021113855361938477,
                    0.02105036735534668,
                    0.020339712142944336,
                    0.02031001663208008,
                    0.02027519989013672,
                    0.02026393508911133,
                    0.020279296875,
                    0.020583423614501953,
                    0.02128691291809082,
                    0.02143129539489746,
                    0.02123980712890625,
                    0.02107187271118164,
                    0.021009408950805664,
                    0.02027622413635254,
                    0.020287488937377928,
                    0.02030182456970215,
                    0.02026188850402832,
                    0.020273151397705077,
                    0.02027724838256836,
                    0.020306943893432617,
                    0.020271104812622072,
                    0.02029363250732422,
                    0.02165452766418457,
                    0.021990400314331054,
                    0.021184511184692383,
                    0.021158912658691405,
                    0.020355072021484375,
                    0.020740095138549804,
                    0.02109644889831543,
                    0.02106572723388672,
                    0.0210831356048584,
                    0.021098495483398438,
                    0.02120806312561035,
                    0.021122047424316406,
                    0.02105241584777832,
                    0.02025369644165039,
                    0.020264959335327147,
                    0.02027519989013672,
                    0.02024550437927246,
                    0.02028339195251465,
                    0.02066739273071289,
                    0.021009408950805664,
                    0.020298751831054687,
                    0.02169036865234375,
                    0.021412864685058593,
                    0.021389312744140625,
                    0.020339712142944336,
                    0.02028441619873047,
                    0.02028544044494629,
                    0.020324352264404297,
                    0.020256767272949217,
                    0.020303871154785155,
                    0.02028646469116211,
                    0.020332544326782227,
                    0.02069196891784668,
                    0.02036735916137695,
                    0.02025164794921875,
                    0.02028339195251465,
                    0.02027212715148926,
                    0.020271104812622072,
                    0.020281343460083007,
                    0.02025984001159668,
                    0.020246528625488282,
                    0.02028646469116211,
                    0.020246528625488282,
                    0.020289535522460937,
                    0.02025574493408203,
                    0.02025062370300293,
                    0.020231168746948244,
                    0.02109542465209961,
                    0.021006336212158205,
                    0.02027724838256836,
                    0.020986879348754883,
                    0.02104115104675293,
                    0.020986879348754883,
                    0.020993024826049804,
                    0.02083737564086914,
                    0.020146175384521483,
                    0.020607999801635742,
                    0.020987903594970703,
                    0.02110361671447754,
                    0.021143552780151367,
                    0.020290559768676757,
                    0.02029779243469238,
                    0.020270015716552733,
                    0.02103193664550781,
                    0.021126144409179686,
                    0.02026905632019043,
                    0.020239360809326173,
                    0.02021683120727539,
                    0.02030080032348633,
                    0.020584447860717774,
                    0.020995071411132812,
                    0.021019647598266602,
                    0.02028441619873047,
                    0.02028646469116211,
                    0.02028544044494629,
                    0.020249599456787108,
                    0.020985855102539062,
                    0.021112831115722656,
                    0.02107699203491211,
                    0.020354047775268554,
                    0.020624383926391602,
                    0.020282367706298828,
                    0.020556800842285155,
                    0.02106982421875,
                    0.021140480041503908,
                    0.021109760284423826,
                    0.021045248031616212,
                    0.020981760025024415,
                    0.021032960891723632,
                    0.021137407302856445,
                    0.020967424392700194,
                    0.021020671844482423,
                    0.021020671844482423,
                    0.021001216888427734,
                    0.020262912750244142,
                    0.02124492835998535,
                    0.02104319953918457,
                    0.02103398323059082,
                    0.02102374458312988,
                    0.021038080215454103,
                    0.02106470489501953,
                    0.021176319122314453,
                    0.02110771179199219,
                    0.02128486442565918,
                    0.02122854423522949,
                    0.020331520080566406,
                    0.020814847946166993,
                    0.020702207565307617,
                    0.020967424392700194,
                    0.020340736389160157,
                    0.020337663650512695,
                    0.020306943893432617,
                    0.020254720687866212,
                    0.02027212715148926,
                    0.020428800582885744,
                    0.02106675148010254,
                    0.02104832077026367,
                    0.021038080215454103,
                    0.02026905632019043,
                    0.02026700782775879,
                    0.02026188850402832,
                    0.020908031463623047,
                    0.02109235191345215,
                    0.020312063217163084,
                    0.02029363250732422,
                    0.020222976684570314,
                    0.02027622413635254,
                    0.020248575210571287,
                    0.021013504028320314,
                    0.021006336212158205,
                    0.02105753517150879,
                    0.02029363250732422,
                    0.020341760635375978,
                    0.020291584014892578,
                    0.020892671585083008,
                    0.020265983581542968,
                    0.020315135955810547,
                    0.020298751831054687,
                    0.020289535522460937,
                    0.02033459281921387,
                    0.020205568313598633,
                    0.020762624740600585,
                    0.020731903076171874,
                    0.020634624481201173,
                    0.021153791427612305,
                    0.02106265640258789,
                    0.02104115104675293,
                    0.021112831115722656,
                    0.02106879997253418,
                    0.021004287719726563,
                    0.020297727584838866,
                    0.02019430351257324,
                    0.02023526382446289,
                    0.02026393508911133,
                    0.02126848030090332,
                    0.021097471237182617,
                    0.021089279174804687,
                    0.020982784271240236,
                    0.02025984001159668,
                    0.020321279525756835,
                    0.02031001663208008,
                    0.020281343460083007,
                    0.0202926082611084,
                    0.020301855087280274,
                    0.02033558464050293
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 48.26380886850399
            },
            "energy": null,
            "efficiency": null
        }
    }
}