{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 29282.24609375,
"base_token_generation_latency_sync": 35.0454402923584,
"base_token_generation_latency_async": 35.173794254660606,
"base_token_generation_throughput_sync": 0.02853438255184508,
"base_token_generation_throughput_async": 0.02843025670645406,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 126.72092132568359,
"base_inference_latency_async": 31.963801383972168,
"base_inference_throughput_sync": 0.007891356766811335,
"base_inference_throughput_async": 0.03128539024464834,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 170638.125,
"smashed_token_generation_latency_sync": 56.02240333557129,
"smashed_token_generation_latency_async": 55.78384846448898,
"smashed_token_generation_throughput_sync": 0.01785000179321212,
"smashed_token_generation_throughput_async": 0.017926335803751196,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 212.82099151611328,
"smashed_inference_latency_async": 109.1862678527832,
"smashed_inference_throughput_sync": 0.004698784611781527,
"smashed_inference_throughput_async": 0.009158660879849,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}