Update tasks/text.py
tasks/text.py CHANGED (+4 −3)
@@ -91,7 +91,7 @@ async def evaluate_text(request: TextEvaluationRequest):
     model.eval()

     predictions = []
-    for text in tqdm(
+    for text in tqdm(test_dataset["quote"]):
         with torch.no_grad():
             tokenized_text = tokenizer(text, truncation=True, padding='max_length', return_tensors = "pt")
             inputt = {k:v.to(device) for k,v in tokenized_text.items()}
@@ -111,7 +111,7 @@ async def evaluate_text(request: TextEvaluationRequest):
     model.to(device)
     model.eval()

-    for i,text in tqdm(enumerate(
+    for i,text in tqdm(enumerate(test_dataset["quote"])):
         if isinstance(predictions[i], str):
             continue
         with torch.no_grad():
@@ -123,7 +123,8 @@ async def evaluate_text(request: TextEvaluationRequest):

             prediction = ID2LABEL[taxonomy_prediction]
             predictions[i] = prediction
-
+
+    predictions = [LABEL_MAPPING[pred] for pred in predictions]
     #--------------------------------------------------------------------------------------------
     # YOUR MODEL INFERENCE STOPS HERE
     #--------------------------------------------------------------------------------------------
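For context, here is a minimal, hedged sketch of the inference pattern these hunks are converging on: iterate over the test quotes, tokenize one text at a time, run a no_grad forward pass, take the argmax of the logits as the taxonomy prediction, and finally map the label strings through LABEL_MAPPING. The checkpoint, example quotes, and the two label maps below are placeholders for illustration only; the Space's actual model, test_dataset, ID2LABEL, and LABEL_MAPPING are defined elsewhere in tasks/text.py and are not shown in this diff.

```python
# Hedged sketch of the single-pass inference loop (first and third hunks).
# Everything named here is a placeholder, not the Space's actual setup.
import torch
from tqdm import tqdm
from transformers import AutoModelForSequenceClassification, AutoTokenizer

device = "cuda" if torch.cuda.is_available() else "cpu"
model_name = "distilbert-base-uncased-finetuned-sst-2-english"  # placeholder checkpoint
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name).to(device)
model.eval()

ID2LABEL = model.config.id2label                                  # id -> label string
LABEL_MAPPING = {label: idx for idx, label in ID2LABEL.items()}   # placeholder reverse map
quotes = ["The climate has always changed.", "Sea levels are rising."]  # stands in for test_dataset["quote"]

predictions = []
for text in tqdm(quotes):
    with torch.no_grad():
        # Tokenize one quote at a time, exactly as in the first hunk.
        tokenized_text = tokenizer(text, truncation=True, padding="max_length", return_tensors="pt")
        inputs = {k: v.to(device) for k, v in tokenized_text.items()}
        # Argmax over the logits as the taxonomy prediction (assumed; not shown in the diff).
        taxonomy_prediction = model(**inputs).logits.argmax(dim=-1).item()
        predictions.append(ID2LABEL[taxonomy_prediction])

# Final step mirrored from the third hunk: convert label strings back to ids.
predictions = [LABEL_MAPPING[pred] for pred in predictions]
print(predictions)
```

The second hunk's loop follows the same pattern with an `isinstance(predictions[i], str)` guard so entries already labelled by the first pass are skipped; that two-pass detail is omitted from the sketch above.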