# Anvil/Flask API server (Hugging Face Space). The hosting site's page chrome
# (status lines, commit hashes, line-number gutter) was scraped into this file
# and has been removed; it was never part of the Python source.
import os
import pathlib
import subprocess
import textwrap

import anvil.server
from flask import Flask, request, render_template, send_file, send_from_directory, jsonify
from transformers import AutoTokenizer, AutoModel

import import_ipynb
from library import call_gpt, call_gemini
from background_service import BackgroundTaskService
# anvil.server.connect('PLMOIU5VCGGUOJH2XORIBWV3-ZXZVFLWX7QFIIAF4')
# Connect this process to Anvil Uplink so every @anvil.server.callable below
# becomes remotely invocable. NOTE(review): the uplink key is hard-coded in
# source — confirm it is meant to be public before publishing this file.
anvil.server.connect("S3SLHUQ2BB33NVTP7FWRAOHS-NDSRD7CDALRPSPLL")
app=Flask(__name__)
# Content rendered on the home page: a title plus the list of published
# server functions (see the home() route).
MESSAGED={'title':'API Server',
'messageL':['published server functions:','encode(text)',
'call_gemini(text,key)','call_gpt(text,key,model)',
'task_id<=launch(func_name,*args)','poll(task_id)']}
# SPECTER tokenizer + encoder, downloaded/loaded once at import time; used by
# both the /encode route and the encode_anvil() uplink function.
tokenizer = AutoTokenizer.from_pretrained('allenai/specter')
encoder = AutoModel.from_pretrained('allenai/specter')
# Expose the imported helpers directly as Anvil server functions.
anvil.server.callable(call_gpt)
anvil.server.callable(call_gemini)
# Background task runner: registered functions can be started via launch()
# and their results collected later via poll().
service=BackgroundTaskService(max_tasks=10)
service.register(call_gpt)
service.register(call_gemini)
@anvil.server.callable
def launch(func_name,*args):
    """Start a registered function as a background task.

    func_name: name of a function previously register()-ed on the service.
    *args: positional arguments forwarded to that function.
    Returns the service-assigned task id, used later with poll().
    """
    task_id = service.launch_task(func_name, *args)
    print(f"Task launched with ID: {task_id}")
    return task_id
@anvil.server.callable
def poll(task_id):
    """Fetch the outcome of a background task.

    Returns 'No such task' / 'In Progress' as strings while not finished.
    Once finished, the result is removed from the service's table and
    returned as-is when it is a plain serializable type, otherwise as its
    string form (e.g. an exception repr).
    """
    result = service.get_result(task_id)
    # Guard clauses for the two sentinel states the service reports.
    if result=='No such task':
        return str(result)
    if result=='In Progress':
        return str(result)
    # Finished: drop the stored result so the table does not grow forever.
    del service.results[task_id]
    if isinstance(result, (int, float, str, list, dict, tuple)):
        return result
    # Non-serializable result (e.g. an exception): log and stringify.
    print(str(result))
    return str(result)
@anvil.server.callable
def encode_anvil(text):
    """Embed *text* with the SPECTER encoder.

    Tokenizes (truncated to 512 tokens), runs the model, and returns the
    [CLS] embedding(s) as a nested Python list (JSON-friendly for Anvil).
    """
    tokens = tokenizer(text, padding=True, truncation=True,
                       return_tensors="pt", max_length=512)
    output = encoder(**tokens)
    # First token ([CLS]) of the last hidden state is the document embedding.
    cls_vectors = output.last_hidden_state[:, 0, :]
    return cls_vectors.detach().numpy().tolist()
@anvil.server.callable
def reset_service():
    """Discard the current task service (and any pending results) and
    replace it with a fresh one with the same registrations."""
    global service
    service = BackgroundTaskService(max_tasks=10)
    for fn in (call_gpt, call_gemini):
        service.register(fn)
@anvil.server.callable
def print_results_table():
    """Return the service's raw task-id -> result mapping (for debugging)."""
    return service.results
@app.route('/encode',methods=['GET','POST'])
def encode():
    """HTTP endpoint: embed ?text= (GET) or {"text": ...} (POST) with SPECTER.

    Returns JSON {'embedding': [[...]]} on success, or a 400 JSON error when
    no text was supplied.
    """
    print(request)
    # Initialize so a POST without a 'text' key (or with no JSON body) cannot
    # hit the check below with `text` unbound (original raised NameError).
    text = None
    if request.method=='GET':
        text=request.args.get('text')
    elif request.method=='POST':
        data=request.get_json()
        # get_json() returns None for non-JSON bodies; guard before indexing.
        if data and 'text' in data:
            text=data["text"]
    if text=='' or text is None:
        # The original `return -1` is not a valid Flask response (500);
        # report the client error explicitly instead.
        return jsonify({'error': 'missing text'}), 400
    inputs = tokenizer(text, padding=True, truncation=True,
                       return_tensors="pt", max_length=512)
    result = encoder(**inputs)
    # [CLS] token embedding, converted to a JSON-serializable nested list.
    embeddings = result.last_hidden_state[:, 0, :]
    emb_array = embeddings.detach().numpy()
    embedding=emb_array.tolist()
    return jsonify({'embedding': embedding})
@app.route("/file/<string:filename>")
def return_file(filename):
    """Serve a file from ./data.

    Uses send_from_directory instead of string concatenation so path
    traversal (e.g. filename='../app.py') is rejected with 404 rather than
    escaping the data directory.
    """
    return send_from_directory('./data', filename)
@app.route('/run',methods=['GET','POST'])
def run_script():
    """HTTP endpoint: run a shell command and return its stdout.

    SECURITY: this executes caller-supplied text in a shell — full remote
    code execution by design. Do not expose this route outside a sandboxed
    environment.
    """
    script=''
    # print(request.method)
    print(request)
    if request.method=='GET':
        script=request.args.get('script')
        print('I am in get')
    elif request.method=='POST':
        print('I am in post')
        data=request.get_json()
        # get_json() returns None for non-JSON bodies; guard before indexing
        # (original raised TypeError on `'script' in None`).
        if data and 'script' in data: script=data['script']
    if script=='' or script is None: return 'INVALID'
    # Preserve the out.txt side effect (served via /file/out.txt) but redirect
    # stdout through subprocess instead of concatenating '> ./out.txt' into
    # the shell string.
    with open('./out.txt','w') as out:
        subprocess.run(script, shell=True, stdout=out)
    with open('./out.txt','r') as f: output=f.read()
    return output
@app.route('/',methods=['GET', 'POST'])
def home():
    """Render the landing page listing the published server functions."""
    return render_template('home.html', messageD=MESSAGED)
# Bind on all interfaces; 7860 is the conventional Hugging Face Spaces port.
if __name__=='__main__':
    app.run(host="0.0.0.0", port=7860)