{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 11.575923919677734,
"base_token_generation_latency_sync": 41.586693572998044,
"base_token_generation_latency_async": 40.612246096134186,
"base_token_generation_throughput_sync": 0.024046153086074943,
"base_token_generation_throughput_async": 0.024623114851438577,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 119.88797302246094,
"base_inference_latency_async": 39.07055854797363,
"base_inference_throughput_sync": 0.008341120254094635,
"base_inference_throughput_async": 0.025594719839290965,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 941181.625,
"smashed_token_generation_latency_sync": 169.25389556884767,
"smashed_token_generation_latency_async": 169.4714294746518,
"smashed_token_generation_throughput_sync": 0.005908283508861564,
"smashed_token_generation_throughput_async": 0.005900699622938934,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 267.9193588256836,
"smashed_inference_latency_async": 214.0911102294922,
"smashed_inference_throughput_sync": 0.003732466382358843,
"smashed_inference_throughput_async": 0.004670908562845337,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}