{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 6.637787342071533,
"base_token_generation_latency_sync": 39.49809379577637,
"base_token_generation_latency_async": 39.11163713783026,
"base_token_generation_throughput_sync": 0.025317677485158348,
"base_token_generation_throughput_async": 0.02556783794234893,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 118.17318344116211,
"base_inference_latency_async": 38.549017906188965,
"base_inference_throughput_sync": 0.008462156733705118,
"base_inference_throughput_async": 0.025940998093221257,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 7.3032684326171875,
"smashed_token_generation_latency_sync": 167.41375885009765,
"smashed_token_generation_latency_async": 167.84572787582874,
"smashed_token_generation_throughput_sync": 0.005973224703086683,
"smashed_token_generation_throughput_async": 0.005957851967133736,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 263.72352142333983,
"smashed_inference_latency_async": 195.48964500427246,
"smashed_inference_throughput_sync": 0.0037918498683883373,
"smashed_inference_throughput_async": 0.005115360457983055,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}