danielhn committed
Commit 2093a1b · 1 parent: 3a135ce

Latest dashboard version

Files changed (1): graphs.py (+39, -38)
graphs.py CHANGED
@@ -18,9 +18,9 @@ colors = {
     "ocean_green": "#3ba272",
 }
 device_colors = {
-    "x86": "#0071c5",
-    "nvidia": "#76b900",
-    "groq": "#F55036",
+    "x86": colors["blue"],
+    "nvidia": colors["green"],
+    "groq": colors["orange"],
 }


@@ -64,19 +64,19 @@ def stages_count_summary(current_df: pd.DataFrame, prev_df: pd.DataFrame) -> None:
     )

     kpi[1].metric(
-        label="Convert to ONNX",
+        label="Converts to ONNX",
         value=current.base_onnx,
         delta=current.base_onnx - prev.base_onnx,
     )

     kpi[2].metric(
-        label="Optimize ONNX file",
+        label="Optimizes ONNX file",
         value=current.optimized_onnx,
         delta=current.optimized_onnx - prev.optimized_onnx,
     )

     kpi[3].metric(
-        label="All ops supported",
+        label="Supports all ops",
         value=current.all_ops_supported,
         delta=current.all_ops_supported - prev.all_ops_supported,
     )
@@ -102,12 +102,13 @@ def stages_count_summary(current_df: pd.DataFrame, prev_df: pd.DataFrame) -> None:
     # Show Sankey graph with percentages
     sk_val = {
         "All models": "100%",
-        "Convert to ONNX": str(int(100 * current.base_onnx / current.all_models)) + "%",
-        "Optimize ONNX file": str(
+        "Converts to ONNX": str(int(100 * current.base_onnx / current.all_models))
+        + "%",
+        "Optimizes ONNX file": str(
             int(100 * current.optimized_onnx / current.all_models)
         )
         + "%",
-        "All ops supported": str(
+        "Supports all ops": str(
             int(100 * current.all_ops_supported / current.all_models)
         )
         + "%",
@@ -138,18 +139,18 @@ def stages_count_summary(current_df: pd.DataFrame, prev_df: pd.DataFrame) -> None:
                 "itemStyle": {"color": "white", "borderColor": "white"},
             },
             {
-                "name": "Convert to ONNX",
-                "value": sk_val["Convert to ONNX"],
+                "name": "Converts to ONNX",
+                "value": sk_val["Converts to ONNX"],
                 "itemStyle": {"color": "white", "borderColor": "white"},
             },
             {
-                "name": "Optimize ONNX file",
-                "value": sk_val["Optimize ONNX file"],
+                "name": "Optimizes ONNX file",
+                "value": sk_val["Optimizes ONNX file"],
                 "itemStyle": {"color": "white", "borderColor": "white"},
             },
             {
-                "name": "All ops supported",
-                "value": sk_val["All ops supported"],
+                "name": "Supports all ops",
+                "value": sk_val["Supports all ops"],
                 "itemStyle": {"color": "white", "borderColor": "white"},
             },
             {
@@ -179,21 +180,21 @@ def stages_count_summary(current_df: pd.DataFrame, prev_df: pd.DataFrame) -> None:
         "links": [
             {
                 "source": "All models",
-                "target": "Convert to ONNX",
+                "target": "Converts to ONNX",
                 "value": current.base_onnx,
             },
             {
-                "source": "Convert to ONNX",
-                "target": "Optimize ONNX file",
+                "source": "Converts to ONNX",
+                "target": "Optimizes ONNX file",
                 "value": current.optimized_onnx,
             },
             {
-                "source": "Optimize ONNX file",
-                "target": "All ops supported",
+                "source": "Optimizes ONNX file",
+                "target": "Supports all ops",
                 "value": current.all_ops_supported,
             },
             {
-                "source": "All ops supported",
+                "source": "Supports all ops",
                 "target": "Converts to FP16",
                 "value": current.fp16_onnx,
             },
@@ -489,14 +490,14 @@ def speedup_bar_chart(df: pd.DataFrame, baseline) -> None:
     )


-def kpi_to_markdown(compute_ratio, device, is_baseline=False, color="#FFFFFF"):
+def kpi_to_markdown(compute_ratio, device, is_baseline=False, color="blue"):

     title = f"""<br><br>
     <p style="font-family:sans-serif; font-size: 20px;text-align: center;">Median {device} Acceleration ({len(compute_ratio)} models):</p>"""
     if is_baseline:
         return (
             title
-            + f"""<p style="font-family:sans-serif; color:{color}; font-size: 26px;text-align: center;"> {1}x (Baseline)</p>"""
+            + f"""<p style="font-family:sans-serif; color:{colors[color]}; font-size: 26px;text-align: center;"> {1}x (Baseline)</p>"""
         )

     if len(compute_ratio) > 0:
@@ -510,8 +511,8 @@ def kpi_to_markdown(compute_ratio, device, is_baseline=False, color="#FFFFFF"):

     return (
         title
-        + f"""<p style="font-family:sans-serif; color:{color}; font-size: 26px;text-align: center;"> {kpi_median}x</p>
-        <p style="font-family:sans-serif; color:{color}; font-size: 20px;text-align: center;"> min {kpi_min}x; max {kpi_max}x</p>
+        + f"""<p style="font-family:sans-serif; color:{colors[color]}; font-size: 26px;text-align: center;"> {kpi_median}x</p>
+        <p style="font-family:sans-serif; color:{colors[color]}; font-size: 20px;text-align: center;"> min {kpi_min}x; max {kpi_max}x</p>
     """
     )

@@ -536,19 +537,19 @@ def speedup_text_summary(df: pd.DataFrame, baseline) -> None:
     x86_text = kpi_to_markdown(
         x86_compute_ratio,
         device="Intel(R) Xeon(R) X40 CPU @ 2.00GHz",
-        color=device_colors["x86"],
+        color="blue",
         is_baseline=baseline == "x86",
     )
     groq_text = kpi_to_markdown(
         groq_compute_ratio,
         device="GroqChip 1",
-        color=device_colors["groq"],
+        color="orange",
         is_baseline=baseline == "groq",
     )
     nvidia_text = kpi_to_markdown(
         nvidia_compute_ratio,
         device="NVIDIA A100-PCIE-40GB",
-        color=device_colors["nvidia"],
+        color="green",
         is_baseline=baseline == "nvidia",
     )

@@ -650,10 +651,10 @@ def device_funnel(df: pd.DataFrame) -> None:
     # Show Sankey graph with percentages
     sk_val = {
         "All models": f"{summ.all_models} models - 100%",
-        "Convert to ONNX": f"{summ.base_onnx} models - "
+        "Converts to ONNX": f"{summ.base_onnx} models - "
         + str(int(100 * summ.base_onnx / summ.all_models))
         + "%",
-        "Optimize ONNX file": f"{summ.optimized_onnx} models - "
+        "Optimizes ONNX file": f"{summ.optimized_onnx} models - "
         + str(int(100 * summ.optimized_onnx / summ.all_models))
         + "%",
         "Converts to FP16": f"{summ.fp16_onnx} models - "
@@ -693,13 +694,13 @@ def device_funnel(df: pd.DataFrame) -> None:
                 "itemStyle": {"color": "white", "borderColor": "white"},
             },
             {
-                "name": "Convert to ONNX",
-                "value": sk_val["Convert to ONNX"],
+                "name": "Converts to ONNX",
+                "value": sk_val["Converts to ONNX"],
                 "itemStyle": {"color": "white", "borderColor": "white"},
             },
             {
-                "name": "Optimize ONNX file",
-                "value": sk_val["Optimize ONNX file"],
+                "name": "Optimizes ONNX file",
+                "value": sk_val["Optimizes ONNX file"],
                 "itemStyle": {"color": "white", "borderColor": "white"},
             },
             {
@@ -743,16 +744,16 @@ def device_funnel(df: pd.DataFrame) -> None:
         "links": [
             {
                 "source": "All models",
-                "target": "Convert to ONNX",
+                "target": "Converts to ONNX",
                 "value": summ.all_models,
             },
             {
-                "source": "Convert to ONNX",
-                "target": "Optimize ONNX file",
+                "source": "Converts to ONNX",
+                "target": "Optimizes ONNX file",
                 "value": summ.optimized_onnx,
             },
             {
-                "source": "Optimize ONNX file",
+                "source": "Optimizes ONNX file",
                 "target": "Converts to FP16",
                 "value": summ.fp16_onnx,
             },
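For context, a minimal sketch of the pattern this commit introduces: device colors are now looked up by name in the shared colors palette, and kpi_to_markdown() receives a color name that it resolves via colors[color] instead of a raw hex string. The hex values assigned to "blue", "green", and "orange" below are placeholders (they simply reuse the old per-device hex codes removed above), and the function body is deliberately simplified; the real palette and HTML markup live in graphs.py.

# Sketch only: "blue"/"green"/"orange" values are placeholders reusing the old
# per-device hex codes removed in this commit; the real palette is in graphs.py.
colors = {
    "blue": "#0071c5",
    "green": "#76b900",
    "orange": "#F55036",
    "ocean_green": "#3ba272",
}

# Device colors are now named entries in the shared palette rather than raw hex.
device_colors = {
    "x86": colors["blue"],
    "nvidia": colors["green"],
    "groq": colors["orange"],
}


def kpi_to_markdown(compute_ratio, device, is_baseline=False, color="blue"):
    # Simplified body: shows only the name-to-hex lookup the commit adds.
    hex_color = colors[color]
    value = "1x (Baseline)" if is_baseline else f"{len(compute_ratio)} models"
    return f'<p style="color:{hex_color}">Median {device} Acceleration: {value}</p>'


# Callers now pass the palette name directly:
print(kpi_to_markdown([1.2, 2.4], device="GroqChip 1", color="orange"))

Centralizing the palette this way keeps device_colors and the HTML produced by kpi_to_markdown in sync whenever a color is changed.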