Sheemz committed on
Commit
631c1d5
1 Parent(s): 852d544

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +20 -3
app.py CHANGED
@@ -1,9 +1,23 @@
1
  import streamlit as st
2
  import torch
3
  import os
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4
 
5
  # Load the model checkpoint
6
- model_path = "https://huggingface.co/SLPG/English_to_Urdu_Unsupervised_MT/tree/main/checkpoint_8_96000.pt"
7
  device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
8
 
9
  # Define a function to load the model
@@ -25,10 +39,13 @@ def translate(model, input_text, src_dict, tgt_dict):
25
  translated_text = "Translated text here"
26
  return translated_text
27
 
 
 
 
28
  # Load model and dictionaries
29
  model = load_model(model_path)
30
- src_dict = load_dictionary("SLPG/English_to_Urdu_Unsupervised_MT/dict.en.txt")
31
- tgt_dict = load_dictionary("SLPG/English_to_Urdu_Unsupervised_MT/dict.ur.txt")
32
 
33
  # Streamlit interface
34
  st.title("Translation Model Inference")
 
1
  import streamlit as st
2
  import torch
3
  import os
4
+ import requests
5
+
6
+ # Define the URL of your model file
7
+ model_url = "https://huggingface.co/SLPG/English_to_Urdu_Unsupervised_MT/resolve/main/checkpoint_8_96000.pt"
8
+ model_path = "checkpoint_8_96000.pt"
9
+
10
# Define a function to download the model file
def download_model(url, file_path):
    """Download the model checkpoint from *url* to *file_path* if absent.

    Streams the response in chunks to a temporary ``.part`` file and
    atomically renames it into place on success, so an interrupted
    download can never leave a truncated checkpoint that a later run
    would mistake for a complete one (the existence check below would
    otherwise skip re-downloading a partial file).

    Parameters:
        url: direct download URL of the checkpoint.
        file_path: local destination path.

    Returns:
        file_path, whether freshly downloaded or already present.

    Raises:
        requests.HTTPError: if the server responds with an error status.
        requests.Timeout: if the connection stalls past the timeout.
    """
    if not os.path.exists(file_path):
        tmp_path = file_path + ".part"
        # stream=True + chunked writes keep memory flat for large checkpoints;
        # timeout prevents the Streamlit app from hanging on a dead connection.
        with requests.get(url, stream=True, timeout=60) as r:
            r.raise_for_status()
            with open(tmp_path, "wb") as f:
                for chunk in r.iter_content(chunk_size=8192):
                    f.write(chunk)
        # Atomic on POSIX and Windows: only a fully written file lands here.
        os.replace(tmp_path, file_path)
    return file_path
19
 
20
  # Load the model checkpoint
 
21
  device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
22
 
23
  # Define a function to load the model
 
39
  translated_text = "Translated text here"
40
  return translated_text
41
 
42
+ # Download the model file
43
+ download_model(model_url, model_path)
44
+
45
  # Load model and dictionaries
46
  model = load_model(model_path)
47
+ src_dict = load_dictionary("path/to/dict.en.txt")
48
+ tgt_dict = load_dictionary("path/to/dict.ur.txt")
49
 
50
  # Streamlit interface
51
  st.title("Translation Model Inference")