9a893919ae410d22315e3817853d589a2047afba50bb3ebf93e4199077303ab5
- base_results.json +19 -0
- plots.png +0 -0
- smashed_results.json +19 -0
base_results.json
ADDED
@@ -0,0 +1,19 @@
+{
+    "current_gpu_type": "Tesla T4",
+    "current_gpu_total_memory": 15095.0625,
+    "perplexity": 3.4586403369903564,
+    "memory_inference_first": 810.0,
+    "memory_inference": 810.0,
+    "token_generation_latency_sync": 38.99144744873047,
+    "token_generation_latency_async": 38.517594896256924,
+    "token_generation_throughput_sync": 0.025646649853532412,
+    "token_generation_throughput_async": 0.0259621609992367,
+    "token_generation_CO2_emissions": 1.9730394054846908e-05,
+    "token_generation_energy_consumption": 0.0018913891809545897,
+    "inference_latency_sync": 123.40780677795411,
+    "inference_latency_async": 47.88656234741211,
+    "inference_throughput_sync": 0.008103215072927158,
+    "inference_throughput_async": 0.02088268505776427,
+    "inference_CO2_emissions": 1.9416946448516684e-05,
+    "inference_energy_consumption": 6.635862082185392e-05
+}
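For reference, each *_throughput value in these files appears to be the reciprocal of the matching *_latency value (the units are not stated in the JSON itself). A minimal sanity check, hard-coding two numbers copied from base_results.json above; the check is illustrative and not part of this commit:

import math

# Values copied from base_results.json above.
token_generation_latency_sync = 38.99144744873047
token_generation_throughput_sync = 0.025646649853532412

# The throughput field matches 1 / latency to within floating-point rounding.
assert math.isclose(1 / token_generation_latency_sync,
                    token_generation_throughput_sync, rel_tol=1e-6)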
plots.png
ADDED
smashed_results.json
ADDED
@@ -0,0 +1,19 @@
+{
+    "current_gpu_type": "Tesla T4",
+    "current_gpu_total_memory": 15095.0625,
+    "perplexity": 4.264157295227051,
+    "memory_inference_first": 304.0,
+    "memory_inference": 304.0,
+    "token_generation_latency_sync": 100.45579986572265,
+    "token_generation_latency_async": 97.21409808844328,
+    "token_generation_throughput_sync": 0.009954626824301642,
+    "token_generation_throughput_async": 0.010286573857736372,
+    "token_generation_CO2_emissions": 1.275328979599919e-05,
+    "token_generation_energy_consumption": 0.0046417158591284475,
+    "inference_latency_sync": 98.32678298950195,
+    "inference_latency_async": 86.0386610031128,
+    "inference_throughput_sync": 0.010170168997665334,
+    "inference_throughput_async": 0.011622682040156587,
+    "inference_CO2_emissions": 1.2610012739805677e-05,
+    "inference_energy_consumption": 4.2289036494555656e-05
+}
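Since base_results.json and smashed_results.json share the same schema, the effect of smashing can be read off by comparing the two files field by field. A minimal sketch, assuming both files are saved in the working directory; the script is illustrative and not part of this commit:

import json

with open("base_results.json") as f:
    base = json.load(f)
with open("smashed_results.json") as f:
    smashed = json.load(f)

# Print every shared metric side by side; for numeric fields also show the smashed/base ratio.
for key, base_value in base.items():
    smashed_value = smashed.get(key)
    if isinstance(base_value, (int, float)) and isinstance(smashed_value, (int, float)) and base_value:
        print(f"{key}: base={base_value:.6g} smashed={smashed_value:.6g} "
              f"ratio={smashed_value / base_value:.3f}")
    else:
        print(f"{key}: base={base_value} smashed={smashed_value}")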