# new_ads_obnova/func_ai.py
import os
import time

import requests
import torch
# from googletrans import Translator
from transformers import pipeline
from deep_translator import GoogleTranslator

# Base URL of the vector-search FastAPI service.
VECTOR_API_URL = os.getenv('API_URL')

# translator = Translator()

# Multilingual sentiment model (negative / neutral / positive).
sentiment_model = pipeline(
    'sentiment-analysis',
    model='cardiffnlp/twitter-xlm-roberta-base-sentiment',
    tokenizer='cardiffnlp/twitter-xlm-roberta-base-sentiment',
    device=0 if torch.cuda.is_available() else -1
)

# Zero-shot classifier used to decide whether a comment is a question.
classifier = pipeline(
    "zero-shot-classification",
    model="valhalla/distilbart-mnli-12-6",
    device=0 if torch.cuda.is_available() else -1
)


def classify_comment(text):
    """Classify a comment as interrogative or non-interrogative via zero-shot classification."""
    if not text:
        print("Received empty text for classification.")
        return "non-interrogative"
    print(f"Classifying comment: {text}")

    try:
        translated_text = GoogleTranslator(source='auto', target="en").translate(text)
        print(f"Translated text: {translated_text}")
    except Exception as e:
        print(f"Translation failed: {e}")
        return "non-interrogative"

    if not translated_text:
        print("Translation returned empty text.")
        return "non-interrogative"

    try:
        result = classifier(
            translated_text,
            ["interrogative", "non-interrogative"],
            clean_up_tokenization_spaces=True
        )
        print(f"Classification result: {result}")
    except Exception as e:
        print(f"Classification failed: {e}")
        return "non-interrogative"

    top_class = result['labels'][0]
    print(f"Top class: {top_class}")
    return top_class
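
# Illustrative usage (hypothetical input): the comment is auto-translated to English
# first and then zero-shot classified, so a question in any language is expected to
# come back as "interrogative", e.g. classify_comment("Сколько это стоит?").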


def retrieve_from_vdb(query):
    """Query the FastAPI vector-search service and return the list of matches."""
    print(f"Sending request to the FastAPI service: {query}")
    response = requests.post(f"{VECTOR_API_URL}/search/", json={"query": query})
    if response.status_code == 200:
        # The service is expected to respond with {"results": [...]}.
        results = response.json().get("results", [])
        print(f"Received {len(results)} results: {results}")
        return results
    else:
        print(f"Search error: {response.text}")
        return []


def analyze_sentiment(comments):
    """Run sentiment analysis over the comments in batches of 50."""
    print("Starting sentiment analysis.")
    results = []
    for i in range(0, len(comments), 50):
        batch = comments[i:i + 50]
        print(f"Analyzing comment batch {i} to {i + len(batch)}: {batch}")
        batch_results = sentiment_model(batch)
        print(f"Batch results: {batch_results}")
        results.extend(batch_results)
        time.sleep(1)  # Delay to avoid overloading the model
    print(f"Sentiment analysis finished. Overall results: {results}")
    return results
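

# Minimal usage sketch, assuming the API_URL environment variable points at a running
# FastAPI vector-search service that exposes the POST /search/ endpoint used above.
# The sample comments are illustrative placeholders, not data from the project.
if __name__ == "__main__":
    sample_comments = [
        "Когда выйдет обновление?",    # "When will the update be released?" - a question
        "Отличная реклама, спасибо!",  # "Great ad, thanks!" - not a question
    ]

    for comment in sample_comments:
        print(comment, "->", classify_comment(comment))

    print(analyze_sentiment(sample_comments))

    # Only hit the vector service when the URL is actually configured.
    if VECTOR_API_URL:
        print(retrieve_from_vdb("новая реклама"))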