Latest dashboard version
graphs.py (CHANGED)
@@ -18,9 +18,9 @@ colors = {
     "ocean_green": "#3ba272",
 }
 device_colors = {
-    "x86": "
-    "nvidia": "
-    "groq": "
+    "x86": colors["blue"],
+    "nvidia": colors["green"],
+    "groq": colors["orange"],
 }


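Note: each device now takes its color from the shared, named colors palette instead of a hard-coded hex string, so one device keeps the same color across every chart. A minimal sketch of the pattern; the hex values and the lookup helper are illustrative assumptions, not part of graphs.py:

colors = {
    "blue": "#5470c6",      # placeholder hex values for this sketch only
    "green": "#91cc75",
    "orange": "#fc8452",
    "ocean_green": "#3ba272",
}
device_colors = {
    "x86": colors["blue"],
    "nvidia": colors["green"],
    "groq": colors["orange"],
}

def color_for_device(device: str) -> str:
    # Hypothetical helper: fall back to a neutral grey for unknown devices.
    return device_colors.get(device, "#999999")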
@@ -64,19 +64,19 @@ def stages_count_summary(current_df: pd.DataFrame, prev_df: pd.DataFrame) -> None:
     )

     kpi[1].metric(
-        label="
+        label="Converts to ONNX",
         value=current.base_onnx,
         delta=current.base_onnx - prev.base_onnx,
     )

     kpi[2].metric(
-        label="
+        label="Optimizes ONNX file",
         value=current.optimized_onnx,
         delta=current.optimized_onnx - prev.optimized_onnx,
     )

     kpi[3].metric(
-        label="
+        label="Supports all ops",
         value=current.all_ops_supported,
         delta=current.all_ops_supported - prev.all_ops_supported,
     )
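The kpi[...] objects here are Streamlit column containers carrying st.metric KPIs. A self-contained sketch of the pattern under stated assumptions: the st.columns(4) layout and the sample numbers are guesses, and only the three metric calls mirror the diff.

import streamlit as st
from types import SimpleNamespace

# Hypothetical stand-ins for the current and previous summary rows;
# in graphs.py these values come from the benchmark DataFrames.
current = SimpleNamespace(base_onnx=87, optimized_onnx=80, all_ops_supported=72)
prev = SimpleNamespace(base_onnx=84, optimized_onnx=79, all_ops_supported=70)

kpi = st.columns(4)  # assumed layout; the diff only shows kpi[1..3]
kpi[1].metric(
    label="Converts to ONNX",
    value=current.base_onnx,
    delta=current.base_onnx - prev.base_onnx,  # rendered as +3
)
kpi[2].metric(
    label="Optimizes ONNX file",
    value=current.optimized_onnx,
    delta=current.optimized_onnx - prev.optimized_onnx,
)
kpi[3].metric(
    label="Supports all ops",
    value=current.all_ops_supported,
    delta=current.all_ops_supported - prev.all_ops_supported,
)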
@@ -102,12 +102,13 @@ def stages_count_summary(current_df: pd.DataFrame, prev_df: pd.DataFrame) -> None:
     # Show Sankey graph with percentages
     sk_val = {
         "All models": "100%",
-        "
-
+        "Converts to ONNX": str(int(100 * current.base_onnx / current.all_models))
+        + "%",
+        "Optimizes ONNX file": str(
             int(100 * current.optimized_onnx / current.all_models)
         )
         + "%",
-        "
+        "Supports all ops": str(
             int(100 * current.all_ops_supported / current.all_models)
         )
         + "%",
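The Sankey labels are plain strings built by integer-truncating each stage's share of all models. A hedged equivalent as a standalone helper (the function name and the zero guard are additions for illustration, not taken from the diff):

def pct_of_total(count: int, total: int) -> str:
    # Same formula as the diff: int() truncates, so 7 of 9 gives "77%", not "78%".
    if total == 0:  # extra guard, not present in graphs.py
        return "0%"
    return str(int(100 * count / total)) + "%"

assert pct_of_total(87, 100) == "87%"
assert pct_of_total(7, 9) == "77%"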
@@ -138,18 +139,18 @@ def stages_count_summary(current_df: pd.DataFrame, prev_df: pd.DataFrame) -> None:
                 "itemStyle": {"color": "white", "borderColor": "white"},
             },
             {
-                "name": "
-                "value": sk_val["
+                "name": "Converts to ONNX",
+                "value": sk_val["Converts to ONNX"],
                 "itemStyle": {"color": "white", "borderColor": "white"},
             },
             {
-                "name": "
-                "value": sk_val["
+                "name": "Optimizes ONNX file",
+                "value": sk_val["Optimizes ONNX file"],
                 "itemStyle": {"color": "white", "borderColor": "white"},
             },
             {
-                "name": "
-                "value": sk_val["
+                "name": "Supports all ops",
+                "value": sk_val["Supports all ops"],
                 "itemStyle": {"color": "white", "borderColor": "white"},
             },
             {
@@ -179,21 +180,21 @@ def stages_count_summary(current_df: pd.DataFrame, prev_df: pd.DataFrame) -> None:
         "links": [
             {
                 "source": "All models",
-                "target": "
+                "target": "Converts to ONNX",
                 "value": current.base_onnx,
             },
             {
-                "source": "
-                "target": "
+                "source": "Converts to ONNX",
+                "target": "Optimizes ONNX file",
                 "value": current.optimized_onnx,
             },
             {
-                "source": "
-                "target": "
+                "source": "Optimizes ONNX file",
+                "target": "Supports all ops",
                 "value": current.all_ops_supported,
             },
             {
-                "source": "
+                "source": "Supports all ops",
                 "target": "Converts to FP16",
                 "value": current.fp16_onnx,
             },
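The renamed nodes and links follow the ECharts Sankey option format ("data" for nodes, "links" for source/target/value edges). A minimal hedged sketch of rendering such an options dict from a Streamlit app; the streamlit_echarts renderer and the toy counts are assumptions about the surrounding code, not something the diff shows:

from streamlit_echarts import st_echarts

options = {
    "series": [
        {
            "type": "sankey",
            # Node names must match the link endpoints exactly.
            "data": [
                {"name": "All models"},
                {"name": "Converts to ONNX"},
                {"name": "Optimizes ONNX file"},
            ],
            # Toy counts standing in for current.base_onnx and current.optimized_onnx.
            "links": [
                {"source": "All models", "target": "Converts to ONNX", "value": 87},
                {"source": "Converts to ONNX", "target": "Optimizes ONNX file", "value": 80},
            ],
        }
    ]
}
st_echarts(options=options, height="400px")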
@@ -489,14 +490,14 @@ def speedup_bar_chart(df: pd.DataFrame, baseline) -> None:
     )


-def kpi_to_markdown(compute_ratio, device, is_baseline=False, color="#FFFFFF"):
+def kpi_to_markdown(compute_ratio, device, is_baseline=False, color="blue"):

    title = f"""<br><br>
    <p style="font-family:sans-serif; font-size: 20px;text-align: center;">Median {device} Acceleration ({len(compute_ratio)} models):</p>"""
    if is_baseline:
        return (
            title
-            + f"""<p style="font-family:sans-serif; color:{color}; font-size: 26px;text-align: center;"> {1}x (Baseline)</p>"""
+            + f"""<p style="font-family:sans-serif; color:{colors[color]}; font-size: 26px;text-align: center;"> {1}x (Baseline)</p>"""
        )

    if len(compute_ratio) > 0:
@@ -510,8 +511,8 @@ def kpi_to_markdown(compute_ratio, device, is_baseline=False, color="#FFFFFF"):

    return (
        title
-        + f"""<p style="font-family:sans-serif; color:{color}; font-size: 26px;text-align: center;"> {kpi_median}x</p>
-        <p style="font-family:sans-serif; color:{color}; font-size: 20px;text-align: center;"> min {kpi_min}x; max {kpi_max}x</p>
+        + f"""<p style="font-family:sans-serif; color:{colors[color]}; font-size: 26px;text-align: center;"> {kpi_median}x</p>
+        <p style="font-family:sans-serif; color:{colors[color]}; font-size: 20px;text-align: center;"> min {kpi_min}x; max {kpi_max}x</p>
        """
    )

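Because kpi_to_markdown now resolves color through the shared colors dict and returns raw HTML, callers pass a palette key and render the string with HTML enabled. A hedged usage sketch; the import path, the sample ratios, and the st.markdown call are assumptions about how the text is displayed:

import streamlit as st
from graphs import kpi_to_markdown  # assumes graphs.py is importable as `graphs`

nvidia_compute_ratio = [2.4, 3.1, 1.8]  # hypothetical speedups vs. the baseline device

nvidia_text = kpi_to_markdown(
    nvidia_compute_ratio,
    device="NVIDIA A100-PCIE-40GB",
    color="green",  # palette key, looked up as colors["green"] inside the function
    is_baseline=False,
)
# The returned string is styled HTML, so it needs unsafe_allow_html to render.
st.markdown(nvidia_text, unsafe_allow_html=True)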
@@ -536,19 +537,19 @@ def speedup_text_summary(df: pd.DataFrame, baseline) -> None:
     x86_text = kpi_to_markdown(
         x86_compute_ratio,
         device="Intel(R) Xeon(R) X40 CPU @ 2.00GHz",
-        color=
+        color="blue",
         is_baseline=baseline == "x86",
     )
     groq_text = kpi_to_markdown(
         groq_compute_ratio,
         device="GroqChip 1",
-        color=
+        color="orange",
         is_baseline=baseline == "groq",
     )
     nvidia_text = kpi_to_markdown(
         nvidia_compute_ratio,
         device="NVIDIA A100-PCIE-40GB",
-        color=
+        color="green",
         is_baseline=baseline == "nvidia",
     )

@@ -650,10 +651,10 @@ def device_funnel(df: pd.DataFrame) -> None:
     # Show Sankey graph with percentages
     sk_val = {
         "All models": f"{summ.all_models} models - 100%",
-        "
+        "Converts to ONNX": f"{summ.base_onnx} models - "
         + str(int(100 * summ.base_onnx / summ.all_models))
         + "%",
-        "
+        "Optimizes ONNX file": f"{summ.optimized_onnx} models - "
         + str(int(100 * summ.optimized_onnx / summ.all_models))
         + "%",
         "Converts to FP16": f"{summ.fp16_onnx} models - "
@@ -693,13 +694,13 @@ def device_funnel(df: pd.DataFrame) -> None:
                 "itemStyle": {"color": "white", "borderColor": "white"},
             },
             {
-                "name": "
-                "value": sk_val["
+                "name": "Converts to ONNX",
+                "value": sk_val["Converts to ONNX"],
                 "itemStyle": {"color": "white", "borderColor": "white"},
             },
             {
-                "name": "
-                "value": sk_val["
+                "name": "Optimizes ONNX file",
+                "value": sk_val["Optimizes ONNX file"],
                 "itemStyle": {"color": "white", "borderColor": "white"},
             },
             {
@@ -743,16 +744,16 @@ def device_funnel(df: pd.DataFrame) -> None:
         "links": [
             {
                 "source": "All models",
-                "target": "
+                "target": "Converts to ONNX",
                 "value": summ.all_models,
             },
             {
-                "source": "
-                "target": "
+                "source": "Converts to ONNX",
+                "target": "Optimizes ONNX file",
                 "value": summ.optimized_onnx,
             },
             {
-                "source": "
+                "source": "Optimizes ONNX file",
                 "target": "Converts to FP16",
                 "value": summ.fp16_onnx,
             },
|