import display_gloss as dg
import synonyms_preprocess as sp
from NLP_Spacy_base_translator import NlpSpacyBaseTranslator
from flask import Flask, render_template, Response, request

app = Flask(__name__)
@app.route('/')
def index():
    # Landing page with the input form. The global preloading of the dataset and
    # spaCy resources below is currently disabled; video_feed() loads the data itself.
    #global dataset, vocabulary_list, dict_2000_tokens, nlp, dict_docs_spacy
    #dataset, vocabulary_list = dg.load_data()
    #dict_2000_tokens = dataset["gloss"].unique()
    #nlp, dict_docs_spacy = sp.load_spacy_values()
    return render_template('index.html')
@app.route('/result', methods=['POST'])
def result():
    # Translate the submitted English sentence into an ASL gloss sequence and render
    # the result page. (Route path assumed to match the form action in index.html.)
    if request.method == 'POST':
        sentence = request.form['inputSentence']
        eng_to_asl_translator = NlpSpacyBaseTranslator(sentence=sentence)
        generated_gloss = eng_to_asl_translator.translate_to_gloss()
        gloss_list_lower = [gloss.lower() for gloss in generated_gloss.split() if gloss.isalnum()]
        gloss_list = gloss_list_lower
        # Synonym mapping onto the 2,000-token gloss dictionary is currently disabled.
        #print('gloss before synonym:', gloss_list_lower)
        #gloss_list = [sp.find_synonyms(gloss, nlp, dict_docs_spacy, dict_2000_tokens) for gloss in gloss_list_lower]
        #print('synonym list:', gloss_list)
        gloss_sentence = " ".join(gloss_list)
        return render_template('translate.html', sentence=sentence, gloss_list=gloss_list, gloss_sentence=gloss_sentence)
@app.route('/video_feed')
def video_feed():
    # Stream the generated sign video for the gloss sentence passed as a query
    # parameter, served as a multipart MJPEG response.
    dataset, vocabulary_list = dg.load_data()
    sentence = request.args.get('gloss_sentence', '')
    gloss_list = sentence.split()
    return Response(dg.generate_video(gloss_list, dataset, vocabulary_list),
                    mimetype='multipart/x-mixed-replace; boundary=frame')
if __name__ == "__main__":
    app.run(host="0.0.0.0", port=5000, debug=True)
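
# Example client usage: a minimal sketch showing how the two routes above can be
# exercised once the app is running on the host/port configured in app.run().
# The sentence and gloss strings are placeholder examples, and the "requests"
# library is an assumed extra dependency, not part of this app.
import requests

BASE_URL = "http://localhost:5000"

# POST an English sentence to /result; the response is the rendered translate.html page.
resp = requests.post(f"{BASE_URL}/result", data={"inputSentence": "hello how are you"})
print(resp.status_code)

# Read the beginning of the MJPEG stream for a gloss sentence from /video_feed.
with requests.get(f"{BASE_URL}/video_feed",
                  params={"gloss_sentence": "hello how you"},
                  stream=True) as video:
    first_chunk = next(video.iter_content(chunk_size=4096))
    print(len(first_chunk), "bytes received")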