{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 7.200127124786377,
"base_token_generation_latency_sync": 50.63789672851563,
"base_token_generation_latency_async": 49.18233975768089,
"base_token_generation_throughput_sync": 0.019748055598779873,
"base_token_generation_throughput_async": 0.020332501563100774,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 104.84735946655273,
"base_inference_latency_async": 101.91307067871094,
"base_inference_throughput_sync": 0.009537674626121692,
"base_inference_throughput_async": 0.009812284070534775,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 8.361743927001953,
"smashed_token_generation_latency_sync": 66.61556053161621,
"smashed_token_generation_latency_async": 64.81923274695873,
"smashed_token_generation_throughput_sync": 0.015011507702099016,
"smashed_token_generation_throughput_async": 0.015427519852075374,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 146.04206237792968,
"smashed_inference_latency_async": 136.93158626556396,
"smashed_inference_throughput_sync": 0.006847342359574367,
"smashed_inference_throughput_async": 0.007302916932990234,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}
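
A minimal sketch of how this file might be consumed, assuming it is saved locally under the hypothetical name "model_metrics.json": the field naming is paired (base_* vs smashed_*), the latencies appear to be in milliseconds, and each throughput value is the reciprocal of the corresponding latency, so the base/smashed ratio of any one metric summarizes the compression trade-off.

# Sketch only: "model_metrics.json" is an assumed local filename, not part of the original file.
import json

with open("model_metrics.json") as f:
    metrics = json.load(f)

# Compare the base model against its smashed (compressed) counterpart
# on the paired metrics present in the file.
for key in ("perplexity", "token_generation_latency_sync", "inference_latency_sync"):
    base = metrics[f"base_{key}"]
    smashed = metrics[f"smashed_{key}"]
    print(f"{key}: base={base:.3f}, smashed={smashed:.3f}, ratio={smashed / base:.2f}x")

Run against the values above, this would report a smashed/base ratio of roughly 1.16x for perplexity and about 1.3x-1.4x for the two sync latencies, i.e. the smashed model trades some quality and speed on this A100 configuration.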