Spaces:
Building
Building
Suppression des variables globales
Browse files
Changed files:
- src/display_gloss.py (+1, -0)
- src/main.py (+7, -9)
src/display_gloss.py
CHANGED
@@ -181,6 +181,7 @@ def load_data(dataset_path='local_dataset'):
     vocabulary_list = data_df['gloss'].tolist()
     return data_df, vocabulary_list
 
+
 def check_gloss_in_vocabulary(gloss, vocabulary_list):
     return gloss in vocabulary_list
 
src/main.py
CHANGED
@@ -1,4 +1,5 @@
 import display_gloss as dg
+import numpy as np
 import synonyms_preprocess as sp
 from NLP_Spacy_base_translator import NlpSpacyBaseTranslator
 from flask import Flask, render_template, Response, request
@@ -9,16 +10,13 @@ app = Flask(__name__)
 
 @app.route('/')
 def index():
-    global dataset, vocabulary_list, dict_2000_tokens, nlp, dict_docs_spacy
-
-    dataset, vocabulary_list = dg.load_data()
-    dict_2000_tokens = dataset["gloss"].unique()
-    nlp, dict_docs_spacy = sp.load_spacy_values()
-
     return render_template('index.html')
 
 @app.route('/translate/', methods=['POST'])
 def result():
+    nlp, dict_docs_spacy = sp.load_spacy_values()
+    _, list_2000_tokens = dg.load_data()
+
     if request.method == 'POST':
         sentence = request.form['inputSentence']
         eng_to_asl_translator = NlpSpacyBaseTranslator(sentence=sentence)
@@ -27,17 +25,17 @@ def result():
     #gloss_list = gloss_list_lower
     #gloss_list = sentence.split()
     #print('gloss before synonym:', gloss_list_lower)
-    gloss_list = [sp.find_synonyms(gloss, nlp, dict_docs_spacy, [rest of line truncated in the rendered view]
+    gloss_list = [sp.find_synonyms(gloss, nlp, dict_docs_spacy, list_2000_tokens) for gloss in gloss_list_lower]
     #print('synonym list:', gloss_list)
     gloss_sentence = " ".join(gloss_list)
     return render_template('translate.html', sentence=sentence, gloss_sentence=gloss_sentence)#gloss_list=gloss_list)
 
 @app.route('/video_feed')
 def video_feed():
-
+    dataset, list_2000_tokens = dg.load_data()
     sentence = request.args.get('gloss_sentence_to_display', '')
     gloss_list = sentence.split()
-    return Response(dg.generate_video(gloss_list, dataset, [rest of line truncated in the rendered view]
+    return Response(dg.generate_video(gloss_list, dataset, list_2000_tokens), mimetype='multipart/x-mixed-replace; boundary=frame')
 
 if __name__ == "__main__":
     app.debug = True