{
"config": {
"name": "pytorch_generate",
"backend": {
"name": "pytorch",
"version": "2.3.0+cu121",
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
"task": "text-generation",
"library": "transformers",
"model": "google/gemma-2b",
"processor": "google/gemma-2b",
"device": "cuda",
"device_ids": "0",
"seed": 42,
"inter_op_num_threads": null,
"intra_op_num_threads": null,
"model_kwargs": {},
"processor_kwargs": {},
"hub_kwargs": {},
"no_weights": true,
"device_map": null,
"torch_dtype": "float16",
"eval_mode": true,
"to_bettertransformer": false,
"low_cpu_mem_usage": null,
"attn_implementation": null,
"cache_implementation": "static",
"autocast_enabled": false,
"autocast_dtype": null,
"torch_compile": false,
"torch_compile_target": "forward",
"torch_compile_config": {
"backend": "inductor",
"mode": "reduce-overhead",
"fullgraph": true
},
"quantization_scheme": null,
"quantization_config": {},
"deepspeed_inference": false,
"deepspeed_inference_config": {},
"peft_type": null,
"peft_config": {}
},
"scenario": {
"name": "inference",
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
"iterations": 2,
"duration": 0,
"warmup_runs": 10,
"input_shapes": {
"batch_size": 1,
"num_choices": 2,
"sequence_length": 7
},
"new_tokens": null,
"latency": true,
"memory": true,
"energy": false,
"forward_kwargs": {},
"generate_kwargs": {
"max_new_tokens": 128,
"min_new_tokens": 128,
"do_sample": false
},
"call_kwargs": {}
},
"launcher": {
"name": "process",
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
"device_isolation": true,
"device_isolation_action": "warn",
"start_method": "spawn"
},
"environment": {
"cpu": " AMD EPYC 7R32",
"cpu_count": 16,
"cpu_ram_mb": 66697.29792,
"system": "Linux",
"machine": "x86_64",
"platform": "Linux-5.10.217-205.860.amzn2.x86_64-x86_64-with-glibc2.29",
"processor": "x86_64",
"python_version": "3.8.10",
"gpu": [
"NVIDIA A10G"
],
"gpu_count": 1,
"gpu_vram_mb": 24146608128,
"optimum_benchmark_version": "0.2.1",
"optimum_benchmark_commit": null,
"transformers_version": "4.42.0.dev0",
"transformers_commit": "940fde8dafaecb8f17b588c5078291f1c1a420c8",
"accelerate_version": "0.31.0.dev0",
"accelerate_commit": null,
"diffusers_version": null,
"diffusers_commit": null,
"optimum_version": "1.21.0.dev0",
"optimum_commit": null,
"timm_version": "0.9.16",
"timm_commit": null,
"peft_version": "0.11.2.dev0",
"peft_commit": null
}
},
"report": {
"prefill": {
"memory": {
"unit": "MB",
"max_ram": 1754.882048,
"max_global_vram": 6793.199616,
"max_process_vram": 0.0,
"max_reserved": 6146.752512,
"max_allocated": 5036.936192
},
"latency": {
"unit": "s",
"count": 2,
"total": 0.05063795280456543,
"mean": 0.025318976402282714,
"stdev": 0.00370678424835205,
"p50": 0.025318976402282714,
"p90": 0.028284403800964357,
"p95": 0.02865508222579956,
"p99": 0.028951624965667725,
"values": [
0.029025760650634766,
0.021612192153930666
]
},
"throughput": {
"unit": "tokens/s",
"value": 276.4724722192518
},
"energy": null,
"efficiency": null
},
"decode": {
"memory": {
"unit": "MB",
"max_ram": 1754.898432,
"max_global_vram": 6793.199616,
"max_process_vram": 0.0,
"max_reserved": 6146.752512,
"max_allocated": 5036.936704
},
"latency": {
"unit": "s",
"count": 2,
"total": 5.07578759765625,
"mean": 2.537893798828125,
"stdev": 0.016163574218750032,
"p50": 2.537893798828125,
"p90": 2.550824658203125,
"p95": 2.552441015625,
"p99": 2.5537341015625,
"values": [
2.554057373046875,
2.521730224609375
]
},
"throughput": {
"unit": "tokens/s",
"value": 50.04149506123636
},
"energy": null,
"efficiency": null
},
"per_token": {
"memory": null,
"latency": {
"unit": "s",
"count": 253,
"total": 5.075155963897701,
"mean": 0.020059904995643103,
"stdev": 0.0014278275260381915,
"p50": 0.019826688766479493,
"p90": 0.02054430694580078,
"p95": 0.020739276123046872,
"p99": 0.02157543487548828,
"values": [
0.022627328872680662,
0.021020671844482423,
0.020544511795043945,
0.020543487548828124,
0.020335615158081053,
0.020550655364990233,
0.020246528625488282,
0.02031718444824219,
0.0204083194732666,
0.020518911361694335,
0.020594688415527345,
0.02019327926635742,
0.02066227149963379,
0.020771839141845702,
0.021372928619384765,
0.021161983489990235,
0.02122444725036621,
0.02126643180847168,
0.02122444725036621,
0.020161535263061522,
0.02022707176208496,
0.020192256927490236,
0.02023321533203125,
0.020167680740356447,
0.020239360809326173,
0.020591615676879883,
0.02167500877380371,
0.020717567443847656,
0.02045952033996582,
0.020602880477905275,
0.020521984100341797,
0.02045337677001953,
0.020580352783203124,
0.020709375381469726,
0.02025164794921875,
0.020257791519165038,
0.020273151397705077,
0.020230144500732423,
0.020360191345214843,
0.02008064079284668,
0.020167680740356447,
0.02005299186706543,
0.02012057685852051,
0.02020147132873535,
0.020109312057495117,
0.01982771110534668,
0.020176895141601564,
0.019767295837402343,
0.019759103775024413,
0.019755008697509766,
0.019775487899780272,
0.019769344329833984,
0.019595264434814453,
0.019647487640380858,
0.01982975959777832,
0.019801088333129883,
0.01984614372253418,
0.01987379264831543,
0.019762176513671875,
0.019644416809082032,
0.01982361602783203,
0.01980723190307617,
0.01986662483215332,
0.019636224746704102,
0.019681280136108398,
0.019817472457885742,
0.019599359512329103,
0.019564544677734375,
0.01967103958129883,
0.019572736740112305,
0.019575807571411134,
0.019558399200439454,
0.019783679962158202,
0.019843072891235353,
0.019886079788208007,
0.019793920516967774,
0.019775487899780272,
0.0204902400970459,
0.020200447082519533,
0.020144128799438478,
0.020343807220458983,
0.020595712661743162,
0.020961280822753905,
0.01990553665161133,
0.01990656089782715,
0.019876863479614256,
0.01979903984069824,
0.019596288681030274,
0.019793920516967774,
0.019751935958862304,
0.019606527328491212,
0.01965056037902832,
0.019610624313354492,
0.019559423446655275,
0.019553279876708983,
0.019557376861572266,
0.019740671157836915,
0.019766271591186522,
0.01986867141723633,
0.01980620765686035,
0.019834880828857423,
0.019809280395507813,
0.01984921646118164,
0.019801088333129883,
0.01983692741394043,
0.019817472457885742,
0.019886079788208007,
0.01987276840209961,
0.019878911972045898,
0.019870719909667968,
0.01984511947631836,
0.020242431640625,
0.020107263565063475,
0.01983078384399414,
0.01987379264831543,
0.0198287353515625,
0.019886079788208007,
0.01982259178161621,
0.01982771110534668,
0.019813375473022463,
0.01982156753540039,
0.019810304641723633,
0.01981644821166992,
0.01981439971923828,
0.01980723190307617,
0.019783679962158202,
0.04166451263427735,
0.01987379264831543,
0.019862527847290038,
0.019834880828857423,
0.019786752700805665,
0.019805183410644533,
0.019777536392211914,
0.019795967102050782,
0.019811328887939454,
0.01982259178161621,
0.019787776947021486,
0.02024345588684082,
0.020603904724121092,
0.020533248901367186,
0.020537343978881836,
0.02063871955871582,
0.02053222465515137,
0.01979903984069824,
0.019817472457885742,
0.01970278358459473,
0.019588096618652344,
0.0196177921295166,
0.019552255630493166,
0.019568639755249022,
0.019595264434814453,
0.01965260887145996,
0.0214835205078125,
0.02063871955871582,
0.02020351982116699,
0.020353023529052734,
0.020377599716186523,
0.01962188720703125,
0.01964543914794922,
0.01963520050048828,
0.019554304122924804,
0.019586048126220702,
0.019582975387573243,
0.01964543914794922,
0.019557376861572266,
0.019583999633789064,
0.019590143203735352,
0.019586048126220702,
0.020165632247924805,
0.020113407135009767,
0.019574783325195313,
0.0196177921295166,
0.019580928802490235,
0.01963929557800293,
0.01982054328918457,
0.019802112579345704,
0.01979084777832031,
0.01963827133178711,
0.019581951141357423,
0.019619840621948242,
0.01963212776184082,
0.01959321594238281,
0.01963007926940918,
0.01963007926940918,
0.019542015075683594,
0.019540992736816407,
0.019590143203735352,
0.01987583923339844,
0.0198154239654541,
0.01962393569946289,
0.019594240188598632,
0.01964031982421875,
0.01964851188659668,
0.019706880569458008,
0.019746816635131836,
0.01965056037902832,
0.019614719390869142,
0.019843072891235353,
0.019784704208374023,
0.01982054328918457,
0.019810304641723633,
0.019804159164428712,
0.01969049644470215,
0.01964236831665039,
0.019566591262817384,
0.019572736740112305,
0.019590143203735352,
0.01964646339416504,
0.01982361602783203,
0.019794944763183595,
0.01984000015258789,
0.01985638427734375,
0.01981439971923828,
0.01987583923339844,
0.019862527847290038,
0.01986662483215332,
0.01978982353210449,
0.01985945510864258,
0.01982771110534668,
0.019819520950317384,
0.019763200759887696,
0.019982336044311523,
0.019780607223510743,
0.01983590316772461,
0.019834880828857423,
0.019802112579345704,
0.019785728454589844,
0.019955711364746095,
0.019951616287231445,
0.021193727493286133,
0.019974143981933593,
0.01985536003112793,
0.019812351226806642,
0.01983795166015625,
0.019770368576049805,
0.0198604793548584,
0.019805183410644533,
0.01984921646118164,
0.020358144760131838,
0.02044927978515625,
0.019826688766479493,
0.019886079788208007,
0.019861503601074217,
0.01987276840209961,
0.020435968399047853,
0.01982975959777832,
0.02002841567993164,
0.02020351982116699,
0.020168703079223634,
0.020420608520507814,
0.019812351226806642,
0.019755008697509766,
0.019535871505737306
]
},
"throughput": {
"unit": "tokens/s",
"value": 49.85068474737015
},
"energy": null,
"efficiency": null
}
}
}
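
For reference, a minimal sketch of how the prefill and decode statistics in the report above could be read back with Python's standard json module. It assumes the JSON has been saved locally as benchmark_report.json (a hypothetical filename); only keys visible in the document are used.

import json

# Hypothetical local copy of the benchmark output shown above.
with open("benchmark_report.json") as f:
    data = json.load(f)

config = data["config"]
report = data["report"]

print(f"model:          {config['backend']['model']}")
print(f"device / dtype: {config['backend']['device']} / {config['backend']['torch_dtype']}")

# Summarize the two measured phases using the units reported in the file.
for phase in ("prefill", "decode"):
    latency = report[phase]["latency"]
    throughput = report[phase]["throughput"]
    print(
        f"{phase:8s} mean latency {latency['mean']:.4f} {latency['unit']}, "
        f"throughput {throughput['value']:.1f} {throughput['unit']}"
    )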