Upload folder using huggingface_hub
- base_results.json +9 -9
- config.json +1 -1
- smashed_results.json +9 -9
base_results.json CHANGED
@@ -1,13 +1,13 @@
 {
     "perplexity_y_gt": 37.44612121582031,
-    "inference_elapsed_time_ms_@1":
-    "inference_latency_ms_@1":
-    "inference_throughput_batches_per_ms_@1": 0.
-    "Loading model_emissions":
-    "Loading model_energy_consumed": 0.
-    "Inference_emissions": 1.
-    "Inference_energy_consumed": 5.
-    "tracker_emissions":
-    "tracker_energy_consumed": 0.
+    "inference_elapsed_time_ms_@1": 319.47897720336914,
+    "inference_latency_ms_@1": 31.947897720336915,
+    "inference_throughput_batches_per_ms_@1": 0.031300964112059085,
+    "Loading model_emissions": 4.8975536774844216e-05,
+    "Loading model_energy_consumed": 0.00016841366817917233,
+    "Inference_emissions": 1.4613342809451579e-05,
+    "Inference_energy_consumed": 5.0251346467397676e-05,
+    "tracker_emissions": 6.746898007251942e-05,
+    "tracker_energy_consumed": 0.00023200763423090873,
     "disk_memory": 24487.82421875
 }
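The added metrics are internally consistent: the reported per-batch latency equals the elapsed time divided by ten (presumably the number of timed batches, which this commit does not document), and the throughput is the reciprocal of that latency. A minimal sanity check, assuming base_results.json has been downloaded into the working directory:

import json

# Consistency check; the divisor of 10 (assumed batch count) is inferred
# from the numbers above, not stated anywhere in this repository.
with open("base_results.json") as f:
    results = json.load(f)

elapsed = results["inference_elapsed_time_ms_@1"]               # total ms
latency = results["inference_latency_ms_@1"]                    # ms per batch
throughput = results["inference_throughput_batches_per_ms_@1"]  # batches/ms

print(abs(latency - elapsed / 10))      # ~0, i.e. latency = elapsed / 10
print(abs(throughput - 1.0 / latency))  # ~0, i.e. throughput = 1 / latency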
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-    "_name_or_path": "/tmp/models/
+    "_name_or_path": "/tmp/models/tmpro8y02vlphg17zf_",
     "architectures": [
         "LlamaForCausalLM"
     ],
smashed_results.json CHANGED
@@ -1,13 +1,13 @@
 {
     "perplexity_y_gt": 43.43341064453125,
-    "inference_elapsed_time_ms_@1":
-    "inference_latency_ms_@1":
-    "inference_throughput_batches_per_ms_@1": 0.
-    "Loading model_emissions": 2.
-    "Loading model_energy_consumed": 7.
-    "Inference_emissions": 2.
-    "Inference_energy_consumed": 7.
-    "tracker_emissions": 4.
-    "tracker_energy_consumed": 0.
+    "inference_elapsed_time_ms_@1": 499.3297576904297,
+    "inference_latency_ms_@1": 49.93297576904297,
+    "inference_throughput_batches_per_ms_@1": 0.02002684567860207,
+    "Loading model_emissions": 2.1109076269491937e-05,
+    "Loading model_energy_consumed": 7.258842272138352e-05,
+    "Inference_emissions": 2.0490007458832622e-05,
+    "Inference_energy_consumed": 7.045961196964501e-05,
+    "tracker_emissions": 4.548937845400742e-05,
+    "tracker_energy_consumed": 0.00015642570950983448,
     "disk_memory": 28615.82421875
 }
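For a side-by-side view of the base and "smashed" runs, the two results files can be compared directly. A small sketch, assuming both JSON files are in the working directory and using only the keys visible in the diffs above:

import json

# Compare base vs. smashed benchmark results key by key.
with open("base_results.json") as f:
    base = json.load(f)
with open("smashed_results.json") as f:
    smashed = json.load(f)

for key in ("perplexity_y_gt", "inference_latency_ms_@1",
            "inference_throughput_batches_per_ms_@1", "disk_memory"):
    b, s = base[key], smashed[key]
    print(f"{key}: base={b:.6g}  smashed={s:.6g}  ratio={s / b:.3f}")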