{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 8.711935997009277,
"base_token_generation_latency_sync": 39.71796798706055,
"base_token_generation_latency_async": 39.712730795145035,
"base_token_generation_throughput_sync": 0.025177521677991768,
"base_token_generation_throughput_async": 0.025180842011555956,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 120.39700546264649,
"base_inference_latency_async": 39.22069072723389,
"base_inference_throughput_sync": 0.008305854420193638,
"base_inference_throughput_async": 0.025496746269836203,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 9.126961708068848,
"smashed_token_generation_latency_sync": 46.46282424926758,
"smashed_token_generation_latency_async": 46.16003856062889,
"smashed_token_generation_throughput_sync": 0.021522583186831645,
"smashed_token_generation_throughput_async": 0.021663760065679976,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 164.03681182861328,
"smashed_inference_latency_async": 70.84982395172119,
"smashed_inference_throughput_sync": 0.006096192609771071,
"smashed_inference_throughput_async": 0.014114361112335643,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}