Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -4,8 +4,6 @@ import os
 import torch
 import spaces
 
-device = "cuda:0" if torch.cuda.is_available() else "cpu"
-print(f'[INFO] Using device: {device}')
 
 # Define model paths
 MODEL_PATHS = {
@@ -20,7 +18,9 @@ TOKEN = os.environ['TOKEN']
 
 # Translation function for Nano and Large models
 @spaces.GPU
-def translate_nano_large(text, model_path, device='cuda:0'):
+def translate_nano_large(text, model_path):
+    device = "cuda:0" if torch.cuda.is_available() else "cpu"
+    print(f'[INFO] Using device: {device}')
     translator = pipeline("translation", model=model_path, token=TOKEN)
     translated = translator(
         text,
@@ -38,7 +38,9 @@ def translate_nano_large(text, model_path, device='cuda:0'):
 
 # Translation function for Ultra and Supreme models
 @spaces.GPU
-def translate_ultra_supreme(text, model_path, device='cuda:0'):
+def translate_ultra_supreme(text, model_path):
+    device = "cuda:0" if torch.cuda.is_available() else "cpu"
+    print(f'[INFO] Using device: {device}')
     model = AutoModelForSeq2SeqLM.from_pretrained(model_path, token=TOKEN)
     tokenizer = AutoTokenizer.from_pretrained(model_path, src_lang="eng_Latn", tgt_lang="ary_Arab", token=TOKEN)
     translator = pipeline(
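
The commit removes the module-level device check and recreates it inside each @spaces.GPU-decorated function. That matters on ZeroGPU Spaces, where a GPU is only attached while such a function is running, so torch.cuda.is_available() at import time would always report CPU. Below is a minimal sketch of the resulting Nano/Large path, assuming torch, spaces, and transformers are installed; passing device= to pipeline() and returning translation_text are illustrative additions not shown in the diff.

import os

import torch
import spaces
from transformers import pipeline

TOKEN = os.environ["TOKEN"]  # HF access token, read the same way as in the diff

@spaces.GPU
def translate_nano_large(text, model_path):
    # Pick the device inside the GPU-decorated function: on ZeroGPU the GPU
    # only exists while this function runs, so a module-level check would
    # always fall back to CPU.
    device = "cuda:0" if torch.cuda.is_available() else "cpu"
    print(f"[INFO] Using device: {device}")
    # Passing device= here is an assumption; the diff only shows the variable
    # being computed and logged before the pipeline is built.
    translator = pipeline("translation", model=model_path, token=TOKEN, device=device)
    translated = translator(text)
    return translated[0]["translation_text"]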
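
The Ultra/Supreme path gets the same treatment but loads the model and tokenizer explicitly with NLLB-style language codes (eng_Latn to ary_Arab, i.e. English to Moroccan Darija). A sketch under the same assumptions; everything after the truncated translator = pipeline( call is a guessed continuation, not taken from the diff.

import os

import torch
import spaces
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer, pipeline

TOKEN = os.environ["TOKEN"]

@spaces.GPU
def translate_ultra_supreme(text, model_path):
    # Same pattern: resolve the device only once the GPU has been attached.
    device = "cuda:0" if torch.cuda.is_available() else "cpu"
    print(f"[INFO] Using device: {device}")
    model = AutoModelForSeq2SeqLM.from_pretrained(model_path, token=TOKEN)
    tokenizer = AutoTokenizer.from_pretrained(
        model_path, src_lang="eng_Latn", tgt_lang="ary_Arab", token=TOKEN
    )
    # Assumed continuation of the call that the diff cuts off.
    translator = pipeline(
        "translation",
        model=model,
        tokenizer=tokenizer,
        src_lang="eng_Latn",
        tgt_lang="ary_Arab",
        device=device,
    )
    translated = translator(text)
    return translated[0]["translation_text"]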
|