sharpenb committed
Commit dd299f5 · verified · 1 parent: 3ce4811

Upload folder using huggingface_hub

Files changed (3):
  1. base_results.json  (+9 −9)
  2. config.json  (+1 −1)
  3. smashed_results.json  (+9 −9)
base_results.json CHANGED
@@ -1,13 +1,13 @@
 {
   "perplexity_y_gt": 37.44612121582031,
-  "inference_elapsed_time_ms_@1": 320.3285160064697,
-  "inference_latency_ms_@1": 32.03285160064697,
-  "inference_throughput_batches_per_ms_@1": 0.03121795126038054,
-  "Loading model_emissions": 3.497445709986833e-05,
-  "Loading model_energy_consumed": 0.00012026772957778693,
-  "Inference_emissions": 1.4713342422286448e-05,
-  "Inference_energy_consumed": 5.0595218178114016e-05,
-  "tracker_emissions": 5.3077428838359856e-05,
-  "tracker_energy_consumed": 0.00018251896920052905,
+  "inference_elapsed_time_ms_@1": 319.47897720336914,
+  "inference_latency_ms_@1": 31.947897720336915,
+  "inference_throughput_batches_per_ms_@1": 0.031300964112059085,
+  "Loading model_emissions": 4.8975536774844216e-05,
+  "Loading model_energy_consumed": 0.00016841366817917233,
+  "Inference_emissions": 1.4613342809451579e-05,
+  "Inference_energy_consumed": 5.0251346467397676e-05,
+  "tracker_emissions": 6.746898007251942e-05,
+  "tracker_energy_consumed": 0.00023200763423090873,
   "disk_memory": 24487.82421875
 }
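
For reference, the timing fields above appear to be related: the per-batch latency looks like the elapsed time divided by the number of batches, and the throughput is its reciprocal. A minimal sketch of that relationship, assuming a batch count of 10 (inferred from the elapsed/latency ratio; it is not stated in the file):

```python
# Sketch only: reproduces the apparent relationship between the timing
# fields in base_results.json. The batch count of 10 is an assumption
# inferred from elapsed / latency, not a value recorded in the JSON.
import json

with open("base_results.json") as f:
    results = json.load(f)

elapsed_ms = results["inference_elapsed_time_ms_@1"]
n_batches = 10  # assumed

latency_ms = elapsed_ms / n_batches   # ~ inference_latency_ms_@1
throughput = 1.0 / latency_ms         # ~ inference_throughput_batches_per_ms_@1

print(f"latency: {latency_ms:.3f} ms/batch, throughput: {throughput:.6f} batches/ms")
```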
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/tmp/models/tmpxcp36skh7e7arzou",
+  "_name_or_path": "/tmp/models/tmpro8y02vlphg17zf_",
   "architectures": [
     "LlamaForCausalLM"
   ],
smashed_results.json CHANGED
@@ -1,13 +1,13 @@
 {
   "perplexity_y_gt": 43.43341064453125,
-  "inference_elapsed_time_ms_@1": 502.85884857177734,
-  "inference_latency_ms_@1": 50.285884857177734,
-  "inference_throughput_batches_per_ms_@1": 0.019886296181129276,
-  "Loading model_emissions": 2.0989565557677216e-05,
-  "Loading model_energy_consumed": 7.217745760106331e-05,
-  "Inference_emissions": 2.0608189677728026e-05,
-  "Inference_energy_consumed": 7.086600876094986e-05,
-  "tracker_emissions": 4.5611308446929816e-05,
-  "tracker_energy_consumed": 0.00015684499388569594,
+  "inference_elapsed_time_ms_@1": 499.3297576904297,
+  "inference_latency_ms_@1": 49.93297576904297,
+  "inference_throughput_batches_per_ms_@1": 0.02002684567860207,
+  "Loading model_emissions": 2.1109076269491937e-05,
+  "Loading model_energy_consumed": 7.258842272138352e-05,
+  "Inference_emissions": 2.0490007458832622e-05,
+  "Inference_energy_consumed": 7.045961196964501e-05,
+  "tracker_emissions": 4.548937845400742e-05,
+  "tracker_energy_consumed": 0.00015642570950983448,
   "disk_memory": 28615.82421875
 }
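
The two result files in this commit can be compared side by side to gauge the smashed model against the base model. A rough sketch, assuming base_results.json and smashed_results.json have been downloaded into the working directory; the unit of disk_memory is not stated in the files, so the difference is reported as-is:

```python
# Sketch only: compares the base and smashed result files from this repo.
# Assumes both JSON files are present locally (e.g. fetched beforehand
# with huggingface_hub).
import json

def load(path):
    with open(path) as f:
        return json.load(f)

base = load("base_results.json")
smashed = load("smashed_results.json")

latency_ratio = smashed["inference_latency_ms_@1"] / base["inference_latency_ms_@1"]
ppl_delta = smashed["perplexity_y_gt"] - base["perplexity_y_gt"]
disk_delta = smashed["disk_memory"] - base["disk_memory"]

print(f"latency ratio (smashed / base): {latency_ratio:.2f}x")
print(f"perplexity change: {ppl_delta:+.2f}")
print(f"disk_memory change: {disk_delta:+.1f} (units as reported)")
```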