Spaces:
Running
Running
vickeee465
committed on
Commit
·
c554973
1
Parent(s):
027da70
cap
Browse files- interfaces/cap.py +1 -1
- utils.py +8 -1
interfaces/cap.py
CHANGED
@@ -104,7 +104,7 @@ def predict(text, model_id, tokenizer_id):
|
|
104 |
|
105 |
gr.Info("Tokenizing")
|
106 |
inputs = tokenizer(text,
|
107 |
-
max_length=
|
108 |
truncation=True,
|
109 |
padding="do_not_pad",
|
110 |
return_tensors="pt").to(device)
|
|
|
104 |
|
105 |
gr.Info("Tokenizing")
|
106 |
inputs = tokenizer(text,
|
107 |
+
max_length=256,
|
108 |
truncation=True,
|
109 |
padding="do_not_pad",
|
110 |
return_tensors="pt").to(device)
|
utils.py
CHANGED
@@ -7,15 +7,22 @@ from interfaces.manifesto import languages as languages_manifesto
|
|
7 |
from interfaces.manifesto import languages as languages_manifesto
|
8 |
"""
|
9 |
|
|
|
|
|
|
|
10 |
from interfaces.cap import build_huggingface_path as hf_cap_path
|
11 |
from interfaces.manifesto import build_huggingface_path as hf_manifesto_path
|
12 |
from interfaces.sentiment import build_huggingface_path as hf_sentiment_path
|
13 |
from interfaces.emotion import build_huggingface_path as hf_emotion_path
|
14 |
|
15 |
-
|
16 |
HF_TOKEN = os.environ["hf_read"]
|
17 |
|
|
|
18 |
models = [hf_manifesto_path(""), hf_sentiment_path(""), hf_emotion_path("")]
|
|
|
|
|
|
|
|
|
19 |
tokenizers = ["xlm-roberta-large"]
|
20 |
|
21 |
def download_hf_models():
|
|
|
7 |
from interfaces.manifesto import languages as languages_manifesto
|
8 |
"""
|
9 |
|
10 |
+
from interfaces.cap import languages as languages_cap
|
11 |
+
from interfaces.cap import domains as domains_cap
|
12 |
+
|
13 |
from interfaces.cap import build_huggingface_path as hf_cap_path
|
14 |
from interfaces.manifesto import build_huggingface_path as hf_manifesto_path
|
15 |
from interfaces.sentiment import build_huggingface_path as hf_sentiment_path
|
16 |
from interfaces.emotion import build_huggingface_path as hf_emotion_path
|
17 |
|
|
|
18 |
HF_TOKEN = os.environ["hf_read"]
|
19 |
|
20 |
+
# should be a temporary solution
|
21 |
models = [hf_manifesto_path(""), hf_sentiment_path(""), hf_emotion_path("")]
|
22 |
+
for language in languages_cap:
|
23 |
+
for domain in domains_cap:
|
24 |
+
models.append(hf_cap_path(language, domain))
|
25 |
+
|
26 |
tokenizers = ["xlm-roberta-large"]
|
27 |
|
28 |
def download_hf_models():
|