Commit c80ed73
Parent: 3075c72
add files
- main.py +83 -0
- requirements.txt +2 -0
main.py
ADDED
@@ -0,0 +1,83 @@
import gradio as gr
import torch
import subprocess
import tempfile


def process_code(pytorch_code):
    output = {}

    try:
        # Step 1: Write the submitted code to a temporary file.
        with tempfile.NamedTemporaryFile(delete=False, suffix=".py") as code_file:
            code_file.write(pytorch_code.encode())
            code_file.flush()

        # Step 2: Run the submitted code; it is expected to write a
        # TorchScript module to the path passed as its first argument.
        script_output_path = tempfile.NamedTemporaryFile(delete=False, suffix=".pt").name
        subprocess.run(
            ["python3", code_file.name, script_output_path],
            check=True,
            capture_output=True,
        )

        # Step 3: Convert the TorchScript module to MLIR (torch dialect).
        torch_mlir_path = tempfile.NamedTemporaryFile(delete=False, suffix=".mlir").name
        subprocess.run(
            ["torchscript-to-mlir", "-i", script_output_path, "-o", torch_mlir_path],
            check=True,
            capture_output=True,
        )
        with open(torch_mlir_path, "r") as file:
            output["Torch Dialect"] = file.read()

        # Step 4: Lower the torch dialect to the linalg dialect.
        linalg_mlir_path = tempfile.NamedTemporaryFile(delete=False, suffix=".mlir").name
        subprocess.run(
            ["mlir-opt", torch_mlir_path, "-convert-torch-to-linalg", "-o", linalg_mlir_path],
            check=True,
            capture_output=True,
        )
        with open(linalg_mlir_path, "r") as file:
            output["Linalg Dialect"] = file.read()

        # Step 5: Lower the linalg dialect to the GPU dialect.
        gpu_mlir_path = tempfile.NamedTemporaryFile(delete=False, suffix=".mlir").name
        subprocess.run(
            ["mlir-opt", linalg_mlir_path, "-convert-linalg-to-gpu", "-o", gpu_mlir_path],
            check=True,
            capture_output=True,
        )
        with open(gpu_mlir_path, "r") as file:
            output["GPU Dialect"] = file.read()

        # Step 6: Lower the GPU dialect to the LLVM dialect.
        llvm_mlir_path = tempfile.NamedTemporaryFile(delete=False, suffix=".mlir").name
        subprocess.run(
            ["mlir-opt", gpu_mlir_path, "-convert-gpu-to-llvm", "-o", llvm_mlir_path],
            check=True,
            capture_output=True,
        )
        with open(llvm_mlir_path, "r") as file:
            output["LLVM Dialect"] = file.read()

    except subprocess.CalledProcessError as e:
        # Surface the failing tool's stderr in any stage that produced no output.
        error = f"An error occurred: {e.stderr.decode()}"
        for key in ("Torch Dialect", "Linalg Dialect", "GPU Dialect", "LLVM Dialect"):
            output.setdefault(key, error)

    # gr.Interface expects one return value per output component.
    return (
        output.get("Torch Dialect", ""),
        output.get("Linalg Dialect", ""),
        output.get("GPU Dialect", ""),
        output.get("LLVM Dialect", ""),
    )


# Gradio interface
iface = gr.Interface(
    fn=process_code,
    inputs="text",
    outputs=[
        gr.Textbox(label="Torch Dialect"),
        gr.Textbox(label="Linalg Dialect"),
        gr.Textbox(label="GPU Dialect"),
        gr.Textbox(label="LLVM Dialect"),
    ],
    title="PyTorch to MLIR Lowering",
    description="Input PyTorch code for matrix multiplication to see each lowering step in MLIR.",
)

if __name__ == "__main__":
    iface.launch()
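For reference, process_code runs the pasted snippet as python3 <file> <output path>, so the snippet is expected to save a TorchScript module to sys.argv[1]. A minimal sketch of such an input, assuming a simple matmul module (the MatMul class and tensor signature are illustrative and not part of this commit):

# Example text-box input: scripts a matmul module and saves it to the
# output path that main.py passes as the first command-line argument.
# (The MatMul module here is a hypothetical example, not part of the Space.)
import sys
import torch


class MatMul(torch.nn.Module):
    def forward(self, a: torch.Tensor, b: torch.Tensor) -> torch.Tensor:
        return torch.matmul(a, b)


scripted = torch.jit.script(MatMul())
scripted.save(sys.argv[1])  # main.py reads this .pt file in Step 3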
requirements.txt
ADDED
@@ -0,0 +1,2 @@
torch
gradio
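Note: requirements.txt only covers the Python dependencies. The subprocess steps in main.py also call the torchscript-to-mlir and mlir-opt binaries, which pip does not install; the Space would additionally need those tools available on its PATH.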