executing name as os command

- __pycache__/library.cpython-38.pyc +0 -0
- __pycache__/registration.cpython-38.pyc +0 -0
- app.py +5 -2
- out.txt +1 -0
__pycache__/library.cpython-38.pyc
ADDED
Binary file (895 Bytes)

__pycache__/registration.cpython-38.pyc
ADDED
Binary file (755 Bytes)
app.py
CHANGED
@@ -4,7 +4,7 @@ import time
 import anvil.server
 from registration import register,get_register,func_reg
 from library import get_file,get_files
-
+import os
 anvil.server.connect('55MH4EBKM22EP4E6D5T6CVSL-VGO5X4SM6JEXGJVT')
 register(get_file)
 register(get_files)
@@ -19,7 +19,10 @@ import json
 import ast
 def my_inference_function(name):
     # print(ast.literal_eval(name)['name'])
-
+    os.system(name+' > ./out.txt')
+    with open('./out.txt','r') as f: output=f.read()
+    return output
+    # return "Input Data: " + name + ", stay tuned for ML models from this API"
 
 gradio_interface = gr.Interface(
     fn=my_inference_function,
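For context, after this change my_inference_function passes the user-supplied name string straight to os.system and returns whatever the command wrote to ./out.txt. Below is a minimal, self-contained sketch of the patched file's behavior; the diff only shows gr.Interface(fn=my_inference_function, ...) as unchanged context, so the "text" input/output components and the launch() call here are assumptions, not lines from this commit.

import os
import gradio as gr

def my_inference_function(name):
    # Run the user-supplied string as a shell command, redirecting stdout to out.txt.
    os.system(name + ' > ./out.txt')
    # Read back whatever the command printed and return it as the response.
    with open('./out.txt', 'r') as f:
        output = f.read()
    return output

# Assumed wiring: the gr.Interface construction is unchanged context in the diff;
# text components and launch() are added here only to make the sketch runnable.
gradio_interface = gr.Interface(
    fn=my_inference_function,
    inputs="text",
    outputs="text",
)

if __name__ == "__main__":
    gradio_interface.launch()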
out.txt
ADDED
@@ -0,0 +1 @@
+/Users/a112956/TryCode/huggingface/gmserver
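The single line committed in out.txt looks like a local working-directory path, which would be consistent with a command such as pwd having been passed through the function above; that interpretation is an assumption, not something the diff states. A hypothetical reproduction sketch:

import os

# Hypothetical: submitting "pwd" as the input runs `pwd > ./out.txt`,
# leaving the current working directory path in out.txt.
os.system('pwd > ./out.txt')
with open('./out.txt') as f:
    print(f.read())  # e.g. /Users/a112956/TryCode/huggingface/gmserver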