from typing import Tuple

import torch
import numpy as np
from transformers import AutoModelForSequenceClassification, AutoTokenizer

from .common import Grader

# Load the scoring model and tokenizer once at import time so repeated calls
# to grade() reuse the same weights.
model_name = "JacobLinCool/IELTS_essay_scoring_safetensors"
model = AutoModelForSequenceClassification.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
class IELTS_essay_scoring(Grader):
    def info(self) -> str:
        return "Safetensors version of [KevSun/IELTS_essay_scoring](https://huggingface.co/KevSun/IELTS_essay_scoring)"
    def grade(self, question: str, answer: str) -> Tuple[float, str]:
        # Score the prompt/essay pair with the fine-tuned regression head.
        text = f"{question} {answer}"
        inputs = tokenizer(
            text, return_tensors="pt", padding=True, truncation=True, max_length=512
        )
        # Inference only: no_grad avoids tracking gradients, so the logits can
        # be converted to a NumPy array directly.
        with torch.no_grad():
            outputs = model(**inputs)
        predictions = outputs.logits.squeeze()
        predicted_scores = predictions.numpy()
        # Rescale to the 0-9 IELTS band range and round to the nearest half band.
        normalized_scores = (predicted_scores / predicted_scores.max()) * 9
        rounded_scores = np.round(normalized_scores * 2) / 2
        labels = [
            "Task Achievement",
            "Coherence and Cohesion",
            "Vocabulary",
            "Grammar",
            "Overall",
        ]
        # The last head output is the overall band score; the per-criterion
        # scores are reported in the comment.
        overall_score = float(rounded_scores[-1])
        comment = ""
        for label, score in zip(labels, rounded_scores):
            comment += f"{label}: {score}\n"
        return overall_score, comment
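

# A minimal usage sketch, not part of the grader interface: it assumes this
# module can be run directly and that a Grader subclass is instantiated and
# called with a prompt and an essay. The prompt and essay text below are
# placeholders, not data from the model card.
if __name__ == "__main__":
    grader = IELTS_essay_scoring()
    question = "Some people believe that universities should focus on preparing students for the workplace. Discuss."
    answer = "In recent years, the purpose of higher education has been widely debated..."  # placeholder essay
    score, comment = grader.grade(question, answer)
    print(f"Overall band: {score}")
    print(comment)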