vishwask committed · Commit 1113ea0 · verified · 1 Parent(s): aa0abf0

Update app.py

Files changed (1): app.py (+6 -4)
app.py CHANGED
@@ -31,20 +31,22 @@ languages_list = [("Gujarati", "gu_IN"), ('Hindi',"hi_IN") , ("Bengali","bn_IN")
 lang_global = ''
 def intitalize_lang(language):
     lang_global = language
-    print(lang_global)
+    print("intitalize_lang"+lang_global)
 
 def english_to_indian(sentence):
-    print (lang_global)
+    print ("english_to_indian"+lang_global)
     translation_tokenizer.src_lang = "en_xx"
     encoded_hi = translation_tokenizer(sentence, return_tensors="pt")
-    generated_tokens = translation_model.generate(**encoded_hi, forced_bos_token_id=translation_tokenizer.lang_code_to_id[lang_global] )
+    generated_tokens = translation_model.generate(**encoded_hi,
+                                                  forced_bos_token_id=translation_tokenizer.lang_code_to_id[lang_global] )
     return (translation_tokenizer.batch_decode(generated_tokens, skip_special_tokens=True))
 
 
 def indian_to_english(sentence):
     translation_tokenizer.src_lang = lang_global
     encoded_hi = translation_tokenizer(sentence, return_tensors="pt")
-    generated_tokens = translation_model.generate(**encoded_hi, forced_bos_token_id=translation_tokenizer.lang_code_to_id["en_XX"] )
+    generated_tokens = translation_model.generate(**encoded_hi,
+                                                  forced_bos_token_id=translation_tokenizer.lang_code_to_id["en_XX"] )
     return (translation_tokenizer.batch_decode(generated_tokens, skip_special_tokens=True))
 
 
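For context, here is a minimal runnable sketch of the translation path these functions implement. It assumes the surrounding app loads an mBART-50 many-to-many checkpoint into translation_model and translation_tokenizer (the loading code is not part of this diff, so the checkpoint name is an assumption). Two details the diffed code glosses over are noted in comments: mBART-50 expects the source code "en_XX" (not "en_xx"), and assigning lang_global inside a function only updates the module-level variable if it is declared global.

# Sketch only: the checkpoint below is assumed, not shown in this diff.
from transformers import MBartForConditionalGeneration, MBart50TokenizerFast

translation_model = MBartForConditionalGeneration.from_pretrained(
    "facebook/mbart-large-50-many-to-many-mmt")
translation_tokenizer = MBart50TokenizerFast.from_pretrained(
    "facebook/mbart-large-50-many-to-many-mmt")

lang_global = ""

def intitalize_lang(language):
    # Without 'global', this assignment would create a local variable and
    # the module-level lang_global would stay empty.
    global lang_global
    lang_global = language

def english_to_indian(sentence):
    # mBART-50 language codes are case-sensitive: "en_XX", not "en_xx".
    translation_tokenizer.src_lang = "en_XX"
    encoded = translation_tokenizer(sentence, return_tensors="pt")
    generated_tokens = translation_model.generate(
        **encoded,
        forced_bos_token_id=translation_tokenizer.lang_code_to_id[lang_global])
    return translation_tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)

def indian_to_english(sentence):
    translation_tokenizer.src_lang = lang_global
    encoded = translation_tokenizer(sentence, return_tensors="pt")
    generated_tokens = translation_model.generate(
        **encoded,
        forced_bos_token_id=translation_tokenizer.lang_code_to_id["en_XX"])
    return translation_tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)

# Example: translate English to Hindi ("hi_IN" comes from languages_list above).
intitalize_lang("hi_IN")
print(english_to_indian("How are you?"))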