Update app.py
app.py CHANGED
@@ -29,18 +29,6 @@ languages_list = [("Gujarati", "gu_IN"), ('Hindi',"hi_IN") , ("Bengali","bn_IN")
 
 
 
-def english_to_indian(sentence):
-    translation_tokenizer.src_lang = "en_xx"
-    encoded_hi = translation_tokenizer(sentence, return_tensors="pt")
-    generated_tokens = translation_model.generate(**encoded_hi, forced_bos_token_id=translation_tokenizer.lang_code_to_id["hi_IN"] )
-    return (translation_tokenizer.batch_decode(generated_tokens, skip_special_tokens=True))
-
-
-def indian_to_english(sentence):
-    translation_tokenizer.src_lang = "hi_IN"
-    encoded_hi = translation_tokenizer(sentence, return_tensors="pt")
-    generated_tokens = translation_model.generate(**encoded_hi, forced_bos_token_id=translation_tokenizer.lang_code_to_id["en_XX"] )
-    return (translation_tokenizer.batch_decode(generated_tokens, skip_special_tokens=True))
 
 def intitalize_lang(language):
     # translation_tokenizer.src_lang = "en_xx"
@@ -194,7 +182,7 @@ def conversation(qa_chain, message, history):
     # Generate response using QA chain
     response = qa_chain({"question": message, "chat_history": formatted_chat_history})
     response_answer = response["answer"]
-    print(
+    print(english_to_indian(response["answer"][:100]))
     if response_answer.find("Helpful Answer:") != -1:
         response_answer = response_answer.split("Helpful Answer:")[-1]
     response_sources = response["source_documents"]
@@ -224,6 +212,20 @@ def upload_file(file_obj):
     return list_file_path
 
 
+def english_to_indian(sentence):
+    translation_tokenizer.src_lang = "en_xx"
+    encoded_hi = translation_tokenizer(sentence, return_tensors="pt")
+    generated_tokens = translation_model.generate(**encoded_hi, forced_bos_token_id=translation_tokenizer.lang_code_to_id[lang_btn] )
+    return (translation_tokenizer.batch_decode(generated_tokens, skip_special_tokens=True))
+
+
+def indian_to_english(sentence):
+    translation_tokenizer.src_lang = lang_btn
+    encoded_hi = translation_tokenizer(sentence, return_tensors="pt")
+    generated_tokens = translation_model.generate(**encoded_hi, forced_bos_token_id=translation_tokenizer.lang_code_to_id["en_XX"] )
+    return (translation_tokenizer.batch_decode(generated_tokens, skip_special_tokens=True))
+
+
 def demo():
     with gr.Blocks(theme="base") as demo:
         vector_db = gr.State()
@@ -276,7 +278,7 @@ def demo():
         with gr.Row():
             lang_btn = gr.Dropdown(languages_list, label="Languages", value = languages_list[1],
                 type="value", info="Choose your language",interactive = True).select(intitalize_lang)
-
+
 
         chatbot = gr.Chatbot(height=300)
 
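Note on the moved helpers: inside demo() the name lang_btn is a local variable bound to the dropdown's .select(...) wiring, not a language-code string, so translation_tokenizer.lang_code_to_id[lang_btn] and translation_tokenizer.src_lang = lang_btn would not resolve when the module-level helpers run. A minimal sketch with the code passed in explicitly follows; the mBART-50 checkpoint name and the target_lang/source_lang parameters are assumptions, since this commit does not show how translation_model and translation_tokenizer are created.

# Hedged sketch, not the commit's code: language codes are passed explicitly instead of
# reading the lang_btn dropdown. The checkpoint name below is an assumption.
from transformers import MBart50TokenizerFast, MBartForConditionalGeneration

MODEL_NAME = "facebook/mbart-large-50-many-to-many-mmt"  # assumed checkpoint
translation_model = MBartForConditionalGeneration.from_pretrained(MODEL_NAME)
translation_tokenizer = MBart50TokenizerFast.from_pretrained(MODEL_NAME)


def english_to_indian(sentence, target_lang="hi_IN"):
    # mBART-50 uses the uppercase code "en_XX" for English; "en_xx" is not a known code.
    translation_tokenizer.src_lang = "en_XX"
    encoded = translation_tokenizer(sentence, return_tensors="pt")
    generated_tokens = translation_model.generate(
        **encoded,
        forced_bos_token_id=translation_tokenizer.lang_code_to_id[target_lang],
    )
    return translation_tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)


def indian_to_english(sentence, source_lang="hi_IN"):
    translation_tokenizer.src_lang = source_lang
    encoded = translation_tokenizer(sentence, return_tensors="pt")
    generated_tokens = translation_model.generate(
        **encoded,
        forced_bos_token_id=translation_tokenizer.lang_code_to_id["en_XX"],
    )
    return translation_tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)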
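If the dropdown selection is meant to drive the translation, one hypothetical wiring (not shown in this commit, which only includes the commented-out line inside intitalize_lang) is to keep the selected code in module state and pass it to the helpers sketched above:

# Hypothetical wiring, not taken from this commit: keep the chosen language code in
# module-level state so the translation helpers never touch the gr.Dropdown itself.
selected_lang = "hi_IN"  # default, matching languages_list[1] == ('Hindi', "hi_IN")


def intitalize_lang(language):
    # Assumes the .select(intitalize_lang) event is wired so `language` receives the
    # chosen code, e.g. "gu_IN".
    global selected_lang
    selected_lang = language


# Inside conversation(), the print added by this commit could then become:
# print(english_to_indian(response_answer[:100], target_lang=selected_lang))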