{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 9.883513450622559,
"base_token_generation_latency_sync": 32.2106897354126,
"base_token_generation_latency_async": 32.60008953511715,
"base_token_generation_throughput_sync": 0.03104559412462984,
"base_token_generation_throughput_async": 0.03067476237826862,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 119.63412399291992,
"base_inference_latency_async": 38.892364501953125,
"base_inference_throughput_sync": 0.008358819094618698,
"base_inference_throughput_async": 0.025711987759185515,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 222301.21875,
"smashed_token_generation_latency_sync": 167.92325897216796,
"smashed_token_generation_latency_async": 167.88078993558884,
"smashed_token_generation_throughput_sync": 0.005955101193967076,
"smashed_token_generation_throughput_async": 0.005956607664186426,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 263.06273193359374,
"smashed_inference_latency_async": 210.80782413482666,
"smashed_inference_throughput_sync": 0.0038013746479772553,
"smashed_inference_throughput_async": 0.0047436569496606,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}