File size: 1,577 Bytes
95fc527
b147674
20712aa
 
b147674
20712aa
95fc527
 
 
20712aa
 
b147674
 
 
20712aa
 
 
 
 
b147674
 
 
 
 
 
 
 
 
 
20712aa
 
 
 
 
b147674
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
import os
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from transformers import pipeline
import langdetect

# Set custom cache directory to avoid permission issues
# (the default ~/.cache may not be writable inside the container).
# NOTE(review): TRANSFORMERS_CACHE is deprecated in recent transformers
# releases in favor of HF_HOME — confirm against the pinned version.
os.environ["TRANSFORMERS_CACHE"] = "/app/cache"

app = FastAPI()

# Load sentiment analysis models once at import time so requests don't
# pay the model-loading cost. Downloads weights on first run.
multilingual_model = pipeline("sentiment-analysis", model="tabularisai/multilingual-sentiment-analysis")
english_model = pipeline("sentiment-analysis", model="siebert/sentiment-roberta-large-english")

class SentimentRequest(BaseModel):
    """Request body for POST /analyze/: the text to classify."""
    text: str

class SentimentResponse(BaseModel):
    """Response body for POST /analyze/."""
    # Echo of the input text.
    original_text: str
    # Detected language code (e.g. "en"), or "unknown" on detection failure.
    language_detected: str
    # Model label, lower-cased (exact label set depends on the model used).
    sentiment: str
    # Model confidence for the predicted label.
    confidence_score: float

def detect_language(text: str) -> str:
    """Detect the language code of *text* via langdetect.

    Returns:
        The detected language code (e.g. "en"), or "unknown" when
        detection fails (empty or non-linguistic input).
    """
    try:
        return langdetect.detect(text)
    except langdetect.LangDetectException:
        # Catch only the library's detection failure; the previous bare
        # `except:` also swallowed SystemExit/KeyboardInterrupt.
        return "unknown"

@app.get("/")
def home():
    """Health-check endpoint confirming the service is up."""
    status_payload = {"message": "Sentiment Analysis API is running!"}
    return status_payload

@app.post("/analyze/", response_model=SentimentResponse)
def analyze_sentiment(request: SentimentRequest):
    """Classify the sentiment of the posted text.

    Routes English text to the English-specific RoBERTa model and all
    other languages to the multilingual model.

    Raises:
        HTTPException: 400 when the text is empty or whitespace-only.
    """
    text = request.text
    # Whitespace-only input previously slipped past the empty check and
    # went on to a meaningless language-detect/model call.
    if not text or not text.strip():
        raise HTTPException(status_code=400, detail="No text provided")

    language = detect_language(text)

    # The English-only model is used when detection says "en"; everything
    # else (including "unknown") falls back to the multilingual model.
    model = english_model if language == "en" else multilingual_model
    result = model(text)

    # pipeline() returns a list with one dict per input text.
    sentiment = result[0]["label"].lower()
    score = result[0]["score"]

    return SentimentResponse(
        original_text=text,
        language_detected=language,
        sentiment=sentiment,
        confidence_score=score,
    )