Update tasks/text.py
tasks/text.py  +6 -1  CHANGED
@@ -2,12 +2,15 @@ from fastapi import APIRouter
 from datetime import datetime
 from datasets import load_dataset
 from sklearn.metrics import accuracy_score
-from transformers import pipeline
 import random
 
 from .utils.evaluation import TextEvaluationRequest
 from .utils.emissions import tracker, clean_emissions_data, get_space_info
 
+#additional imports
+from transformers import pipeline
+import logging
+
 router = APIRouter()
 
 DESCRIPTION = "Random Baseline"

@@ -62,6 +65,8 @@ async def evaluate_text(request: TextEvaluationRequest):
 
     available_pipeline = pipeline(tasks="text_classfication")
     print(available_pipeline)
+    logging.log(INFO, available_pipeline)
+
     predictions = [random.randint(0, 7) for _ in range(len(true_labels))]
 
     #--------------------------------------------------------------------------------------------
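Note that the added lines will not run as committed: transformers' pipeline() takes a `task` keyword rather than `tasks`, the task name is spelled "text-classification", and logging.log(INFO, ...) references an undefined name INFO. A minimal corrected sketch of what the addition appears to intend is below; loading the library's default text-classification checkpoint is an assumption, since the commit pins no model.

import logging
from transformers import pipeline

logging.basicConfig(level=logging.INFO)

# `task` is the keyword and "text-classification" is the registered task name;
# with no model argument, transformers falls back to its default checkpoint for the task.
available_pipeline = pipeline(task="text-classification")
print(available_pipeline)

# logging.log(INFO, ...) raises NameError; use logging.info() or logging.log(logging.INFO, ...).
logging.info(available_pipeline)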