{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 9.883513450622559,
"base_token_generation_latency_sync": 38.43839454650879,
"base_token_generation_latency_async": 37.76677194982767,
"base_token_generation_throughput_sync": 0.02601565470665127,
"base_token_generation_throughput_async": 0.02647830217865795,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 120.81008758544922,
"base_inference_latency_async": 39.577531814575195,
"base_inference_throughput_sync": 0.00827745447409512,
"base_inference_throughput_async": 0.025266861124263705,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 10.845861434936523,
"smashed_token_generation_latency_sync": 62.76105728149414,
"smashed_token_generation_latency_async": 63.895757496356964,
"smashed_token_generation_throughput_sync": 0.015933447320921123,
"smashed_token_generation_throughput_async": 0.015650491350024536,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 197.32029418945314,
"smashed_inference_latency_async": 111.18876934051514,
"smashed_inference_throughput_sync": 0.005067902438052672,
"smashed_inference_throughput_async": 0.008993714076801267,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}