Spaces:
Sleeping
Sleeping
File size: 3,978 Bytes
6384276 cdd7729 ef9a278 e04aa8c 6384276 ef9a278 e04aa8c ef9a278 6384276 ff67861 6384276 ff67861 6384276 6fa99ad 6384276 5190354 6384276 6fa99ad 6384276 157f64a 8ece36b 6384276 6fa99ad 6384276 6fa99ad 725eacb 6fa99ad 725eacb 6384276 5190354 725eacb 6fa99ad 725eacb 6384276 ef9a278 725eacb ef9a278 e04aa8c |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 |
import json
import os

import requests
import torch
from fastapi import FastAPI, HTTPException
from fastapi.responses import FileResponse
from fastapi.staticfiles import StaticFiles
from pydantic import BaseModel
from transformers import AutoModelForSequenceClassification, AutoTokenizer, pipeline
app = FastAPI()

# Thai sentiment-analysis model hosted on the Hugging Face Hub.
model_name = "SandboxBhh/sentiment-thai-text-model"

try:
    # Use the first GPU when available, otherwise fall back to CPU (-1).
    device = 0 if torch.cuda.is_available() else -1
    reloaded_pipe = pipeline(
        "text-classification",
        model=model_name,
        tokenizer=model_name,
        device=device,
    )
except Exception as e:
    # Keep the app importable even if the model download/load fails;
    # endpoints check for None and respond with HTTP 500 instead.
    print(f"Error loading model: {e}")
    reloaded_pipe = None
class TextInput(BaseModel):
    """Request body for /classify-text."""

    # Raw text to run through the sentiment classifier.
    text: str
def send_line_notification(message, line_token, result, score, sentiment):
    """Send a detailed notification through the LINE Notify API.

    Builds a message containing the original text, the raw classifier
    result, the confidence score, and the sentiment label, then POSTs it
    to LINE Notify using *line_token* as the bearer token.

    Returns the HTTP status code of the LINE Notify response
    (200 on success).
    """
    url = "https://notify-api.line.me/api/notify"
    headers = {"Authorization": f"Bearer {line_token}"}
    # Create a detailed message including result, score, and sentiment.
    # ensure_ascii=False keeps Thai characters readable in the message.
    detailed_message = (f"{message}\n\n"
                        f"Result: {json.dumps(result, ensure_ascii=False)}\n"
                        f"Score: {score:.4f}\n"
                        f"Sentiment: {sentiment}")
    data = {"message": detailed_message}
    response = requests.post(url, headers=headers, data=data)
    return response.status_code
def split_message(message, max_length=1000):
    """Split *message* into chunks of at most *max_length* characters.

    LINE Notify limits a single message to 1000 characters, so longer
    alerts are sent as multiple sequential notifications.

    Returns a list of substrings in original order; empty input yields
    an empty list.
    """
    return [message[i:i + max_length] for i in range(0, len(message), max_length)]
# Use environment variable for LINE token.
# NOTE(security): a live token was previously hard-coded here and committed
# to source control — it must be revoked. Configure LINE_TOKEN in the
# deployment environment instead.
line_token = os.environ.get("LINE_TOKEN", "")
@app.post("/classify-text")
async def classify_text(input: TextInput):
    """Classify the sentiment of Thai text and alert on negative results.

    Runs the loaded text-classification pipeline on ``input.text``. For a
    'neg' label, formats a LINE alert message, splits it into LINE-sized
    chunks, and reports a per-chunk notification status. Other labels are
    mapped to "positive" ('pos') or "neutral".

    Raises HTTPException 500 when the model failed to load at startup or
    when inference raises.
    """
    if reloaded_pipe is None:
        raise HTTPException(status_code=500, detail="Model not loaded")
    try:
        result = reloaded_pipe(input.text)
        # Pipeline output is a list of {'label': ..., 'score': ...} dicts;
        # only the top prediction is used.
        sentiment = result[0]['label'].lower()
        score = result[0]['score']
        if sentiment == 'neg':
            message = f"[แจ้งเตือน CSI-Negative]: ความพึงพอใจของผู้ป่วย \nข้อความ: {input.text} \ncsi score: {score:.2f}"
            message_parts = split_message(message)
            notification_status = []
            for i, part in enumerate(message_parts):
                # NOTE(review): actual LINE delivery is stubbed out — the
                # status is hard-coded to 200, so every chunk reports
                # success. Re-enable the call below to send for real.
                # status = send_line_notification(part, line_token)
                status = 200
                if status == 200:
                    notification_status.append(f"ส่งการแจ้งเตือนส่วนที่ {i+1}/{len(message_parts)} ผ่าน LINE สำเร็จ")
                else:
                    notification_status.append(f"การส่งการแจ้งเตือนส่วนที่ {i+1}/{len(message_parts)} ผ่าน LINE ล้มเหลว")
            return {
                "result": result,
                "sentiment": "negative",
                "score": score,
                "message": "Negative sentiment detected and notification sent to LINE.",
                "formatted_message": message,
                "notification_status": notification_status
            }
        else:
            message = f"[ไม่พบความคิดเห็นเชิงลบ]: ข้อความ: {input.text} \ncsi score: {score:.2f}"
            return {
                "result": result,
                "sentiment": "positive" if sentiment == 'pos' else "neutral",
                "score": score,
                "message": "Sentiment is not negative. No notification sent.",
                "formatted_message": message
            }
    except Exception as e:
        # Surface inference failures as a 500 with the error text.
        raise HTTPException(status_code=500, detail=str(e))
@app.get("/")
def index() -> FileResponse:
    """Serve the single-page app entry point."""
    return FileResponse(path="/app/static/index.html", media_type="text/html")


# Catch-all static mount. Registered LAST: Starlette matches routes in
# registration order, so mounting "/" first would shadow the route above
# and make it unreachable.
app.mount("/", StaticFiles(directory="static", html=True), name="static")
|