{
    "config": {
        "name": "pytorch_generate",
        "backend": {
            "name": "pytorch",
            "version": "2.4.0+cu121",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-generation",
            "library": "transformers",
            "model_type": "gemma",
            "model": "google/gemma-2b",
            "processor": "google/gemma-2b",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": "float16",
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": "static",
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {
                "backend": "inductor",
                "mode": "reduce-overhead",
                "fullgraph": true
            },
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 2,
            "duration": 0,
            "warmup_runs": 10,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 7
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 128,
                "min_new_tokens": 128,
                "do_sample": false
            },
            "call_kwargs": {}
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "warn",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7R32",
            "cpu_count": 16,
            "cpu_ram_mb": 66697.261056,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.29",
            "processor": "x86_64",
            "python_version": "3.8.10",
            "gpu": [
                "NVIDIA A10G"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 24146608128,
            "optimum_benchmark_version": "0.4.0",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.45.0.dev0",
            "transformers_commit": "66bc4def9505fa7c7fe4aa7a248c34a026bb552b",
            "accelerate_version": "0.35.0.dev0",
            "accelerate_commit": null,
            "diffusers_version": null,
            "diffusers_commit": null,
            "optimum_version": "1.22.0.dev0",
            "optimum_commit": null,
            "timm_version": "0.9.16",
            "timm_commit": null,
            "peft_version": "0.12.1.dev0",
            "peft_commit": null
        }
    },
    "report": {
        "load": {
            "memory": {
                "unit": "MB",
                "max_ram": 1335.468032,
                "max_global_vram": 6775.373824,
                "max_process_vram": 0.0,
                "max_reserved": 6138.363904,
                "max_allocated": 6060.931072
            },
            "latency": {
                "unit": "s",
                "count": 1,
                "total": 11.5399248046875,
                "mean": 11.5399248046875,
                "stdev": 0.0,
                "p50": 11.5399248046875,
                "p90": 11.5399248046875,
                "p95": 11.5399248046875,
                "p99": 11.5399248046875,
                "values": [
                    11.5399248046875
                ]
            },
            "throughput": null,
            "energy": null,
            "efficiency": null
        },
        "prefill": {
            "memory": {
                "unit": "MB",
                "max_ram": 1805.90592,
                "max_global_vram": 6796.345344,
                "max_process_vram": 0.0,
                "max_reserved": 6142.558208,
                "max_allocated": 5028.450816
            },
            "latency": {
                "unit": "s",
                "count": 2,
                "total": 0.045890432357788086,
                "mean": 0.022945216178894043,
                "stdev": 0.000178656578063964,
                "p50": 0.022945216178894043,
                "p90": 0.023088141441345213,
                "p95": 0.023106007099151612,
                "p99": 0.023120299625396728,
                "values": [
                    0.02276655960083008,
                    0.023123872756958007
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 305.0744846082073
            },
            "energy": null,
            "efficiency": null
        },
        "decode": {
            "memory": {
                "unit": "MB",
                "max_ram": 1832.402944,
                "max_global_vram": 6800.539648,
                "max_process_vram": 0.0,
                "max_reserved": 6146.752512,
                "max_allocated": 5031.820288
            },
            "latency": {
                "unit": "s",
                "count": 2,
                "total": 5.368739990234375,
                "mean": 2.6843699951171875,
                "stdev": 0.0015821533203124183,
                "p50": 2.6843699951171875,
                "p90": 2.6856357177734376,
                "p95": 2.6857939331054688,
                "p99": 2.6859205053710937,
                "values": [
                    2.6859521484375,
                    2.682787841796875
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 47.31091475132352
            },
            "energy": null,
            "efficiency": null
        },
        "per_token": {
            "memory": null,
            "latency": {
                "unit": "s",
                "count": 254,
                "total": 5.368476690292361,
                "mean": 0.02113573500115102,
                "stdev": 0.0002265092683081507,
                "p50": 0.02106060791015625,
                "p90": 0.02135183353424072,
                "p95": 0.02160952339172363,
                "p99": 0.02202634204864502,
                "values": [
                    0.02145894432067871,
                    0.021154815673828126,
                    0.021111808776855468,
                    0.021198848724365234,
                    0.02106982421875,
                    0.02100223922729492,
                    0.02102272033691406,
                    0.02103603172302246,
                    0.021005311965942384,
                    0.02103603172302246,
                    0.021020671844482423,
                    0.02123161506652832,
                    0.021008384704589843,
                    0.021046272277832033,
                    0.02106879997253418,
                    0.02103500747680664,
                    0.021011455535888672,
                    0.02101043128967285,
                    0.021011455535888672,
                    0.021180416107177736,
                    0.020998144149780275,
                    0.021179391860961915,
                    0.02102783966064453,
                    0.021128192901611328,
                    0.0212674560546875,
                    0.02147225570678711,
                    0.022373376846313478,
                    0.02251571273803711,
                    0.021601280212402343,
                    0.0212541446685791,
                    0.021176319122314453,
                    0.021120000839233398,
                    0.021234687805175782,
                    0.021061632156372072,
                    0.020998144149780275,
                    0.020959232330322267,
                    0.021029888153076173,
                    0.020988927841186524,
                    0.02105753517150879,
                    0.02108415985107422,
                    0.021445632934570313,
                    0.0215285758972168,
                    0.021037055969238282,
                    0.021242879867553712,
                    0.021117952346801756,
                    0.02104832077026367,
                    0.02105548858642578,
                    0.02103500747680664,
                    0.02106982421875,
                    0.021015552520751952,
                    0.021111808776855468,
                    0.02106879997253418,
                    0.02106470489501953,
                    0.02102783966064453,
                    0.0210882568359375,
                    0.021082111358642578,
                    0.020989952087402345,
                    0.02105855941772461,
                    0.02106572723388672,
                    0.021238784790039062,
                    0.021072895050048827,
                    0.021001216888427734,
                    0.0210565128326416,
                    0.020940799713134766,
                    0.020951040267944337,
                    0.02102272033691406,
                    0.02127257537841797,
                    0.022042623519897463,
                    0.02162483215332031,
                    0.021352447509765626,
                    0.021115903854370118,
                    0.02105446434020996,
                    0.021161983489990235,
                    0.02099404716491699,
                    0.02105241584777832,
                    0.021215232849121093,
                    0.02169241523742676,
                    0.02142207908630371,
                    0.02106265640258789,
                    0.02106060791015625,
                    0.021061632156372072,
                    0.02103603172302246,
                    0.021024768829345702,
                    0.02105446434020996,
                    0.02101759910583496,
                    0.02107904052734375,
                    0.02106470489501953,
                    0.02106777572631836,
                    0.021372928619384765,
                    0.021106687545776368,
                    0.02106368064880371,
                    0.021007360458374022,
                    0.021006336212158205,
                    0.021032960891723632,
                    0.021170175552368165,
                    0.021183488845825195,
                    0.021102592468261717,
                    0.021008384704589843,
                    0.021094400405883788,
                    0.021183488845825195,
                    0.02130534362792969,
                    0.021115903854370118,
                    0.020997119903564454,
                    0.020992000579833983,
                    0.021046272277832033,
                    0.020992000579833983,
                    0.021163007736206055,
                    0.02112512016296387,
                    0.021082111358642578,
                    0.021024768829345702,
                    0.020967424392700194,
                    0.020985855102539062,
                    0.020980735778808594,
                    0.020938751220703124,
                    0.021167104721069335,
                    0.02109951972961426,
                    0.021121023178100586,
                    0.02104729652404785,
                    0.021201919555664063,
                    0.021132287979125978,
                    0.021000192642211913,
                    0.020999168395996092,
                    0.021300224304199217,
                    0.021016576766967773,
                    0.021144575119018554,
                    0.021383167266845703,
                    0.021936128616333008,
                    0.021007360458374022,
                    0.020999168395996092,
                    0.021210111618041993,
                    0.02107596778869629,
                    0.020992000579833983,
                    0.020966400146484376,
                    0.02104319953918457,
                    0.021242879867553712,
                    0.021086208343505858,
                    0.020981760025024415,
                    0.021275648117065428,
                    0.02100223922729492,
                    0.020943872451782225,
                    0.020996095657348633,
                    0.021024768829345702,
                    0.021016576766967773,
                    0.021016576766967773,
                    0.02103398323059082,
                    0.021246976852416992,
                    0.020977664947509765,
                    0.02102272033691406,
                    0.02103193664550781,
                    0.020981760025024415,
                    0.021016576766967773,
                    0.021000192642211913,
                    0.021113855361938477,
                    0.02125004768371582,
                    0.02104115104675293,
                    0.02103603172302246,
                    0.021045248031616212,
                    0.02103193664550781,
                    0.02102783966064453,
                    0.021001216888427734,
                    0.021014528274536134,
                    0.021192703247070312,
                    0.021180416107177736,
                    0.02104422378540039,
                    0.020981760025024415,
                    0.020999168395996092,
                    0.02107084846496582,
                    0.021032960891723632,
                    0.02100223922729492,
                    0.021193727493286133,
                    0.020976640701293944,
                    0.021194751739501954,
                    0.02109235191345215,
                    0.021350400924682617,
                    0.021952512741088868,
                    0.021142528533935546,
                    0.021015552520751952,
                    0.021203968048095705,
                    0.0210513916015625,
                    0.020963327407836914,
                    0.020960256576538085,
                    0.021011455535888672,
                    0.021394432067871092,
                    0.02108518409729004,
                    0.021363712310791014,
                    0.021178367614746094,
                    0.02103500747680664,
                    0.02128998374938965,
                    0.021529600143432616,
                    0.021163007736206055,
                    0.02109337615966797,
                    0.02109235191345215,
                    0.021014528274536134,
                    0.02124595260620117,
                    0.021139455795288087,
                    0.02106470489501953,
                    0.021028863906860353,
                    0.02106060791015625,
                    0.02109337615966797,
                    0.021053440093994142,
                    0.02201190376281738,
                    0.02166476821899414,
                    0.022009855270385743,
                    0.02166579246520996,
                    0.02131046485900879,
                    0.02106982421875,
                    0.021037055969238282,
                    0.021045248031616212,
                    0.02102681541442871,
                    0.021163007736206055,
                    0.021122047424316406,
                    0.02102783966064453,
                    0.021011455535888672,
                    0.02109542465209961,
                    0.020989952087402345,
                    0.02110054397583008,
                    0.02101862335205078,
                    0.0210565128326416,
                    0.021072895050048827,
                    0.021073919296264648,
                    0.02106470489501953,
                    0.021144575119018554,
                    0.021352447509765626,
                    0.021729280471801758,
                    0.02125209617614746,
                    0.02104934310913086,
                    0.020968448638916014,
                    0.0210513916015625,
                    0.021028863906860353,
                    0.021086208343505858,
                    0.020968448638916014,
                    0.020968448638916014,
                    0.02104422378540039,
                    0.021014528274536134,
                    0.021028863906860353,
                    0.021999616622924805,
                    0.021121023178100586,
                    0.020993024826049804,
                    0.02104934310913086,
                    0.021131263732910157,
                    0.021020671844482423,
                    0.02102374458312988,
                    0.02102374458312988,
                    0.021028863906860353,
                    0.020997119903564454,
                    0.02105241584777832,
                    0.02109644889831543,
                    0.021126144409179686,
                    0.02104729652404785,
                    0.021028863906860353,
                    0.021194751739501954,
                    0.020999168395996092,
                    0.02104422378540039
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 47.31323514159983
            },
            "energy": null,
            "efficiency": null
        }
    }
}