import requests

from utils import log
class LLMConnector:
    """Thin client that forwards chat requests to the Spark inference microservice."""

    def __init__(self, service_config):
        self.service_config = service_config

    def call_spark(self, project_name, prompt, chat_history):
        # Resolve the project and its LLM settings from the service config.
        project = self.service_config.get_project(project_name)
        if not project:
            raise ValueError(f"Project not found: {project_name}")

        llm_config = project.get("llm", {})
        if not llm_config:
            raise ValueError(f"LLM config missing for project: {project_name}")

        spark_url = self.service_config.llm_inference_service_url
        work_mode = self.service_config.work_mode
        cloud_token = self.service_config.get_auth_token()

        headers = {
            "Authorization": f"Bearer {cloud_token}",
            "Content-Type": "application/json",
        }

        # The latest user turn is sent as user_input; the full history rides
        # along as context so Spark can condition on prior turns.
        payload = {
            "work_mode": work_mode,
            "cloud_token": cloud_token,
            "project_name": project_name,
            "project_version": project.get("project_version"),
            "repo_id": llm_config.get("repo_id"),
            "generation_config": llm_config.get("generation_config"),
            "use_fine_tune": llm_config.get("use_fine_tune"),
            "fine_tune_zip": llm_config.get("fine_tune_zip"),
            "user_input": chat_history[-1]["content"] if chat_history else "",
            "system_prompt": prompt,
            "context": chat_history,
        }
        try:
            log(f"Sending request to Spark for project: {project_name}")
            response = requests.post(
                f"{spark_url}/generate", json=payload, headers=headers, timeout=60
            )
            response.raise_for_status()
            result = response.json()
            log("Spark response received successfully.")
            return result.get("model_answer")
        except Exception as e:
            # Any transport, HTTP, or parsing failure is logged and surfaced
            # to the caller as None rather than propagating.
            log(f"Spark microservice error: {e}")
            return None
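

# Usage sketch (illustrative; not part of the original module). The config
# object below is a minimal stand-in exposing only the attributes and methods
# call_spark() actually uses; the class name, URL, work mode, token, and
# project record shape are assumptions for demonstration only.
if __name__ == "__main__":
    class _DemoConfig:
        llm_inference_service_url = "http://localhost:8000"  # assumed Spark URL
        work_mode = "local"  # assumed work-mode value

        def get_project(self, name):
            # Minimal project record with an "llm" section, as call_spark expects.
            return {"project_version": "v1", "llm": {"repo_id": "demo/repo"}}

        def get_auth_token(self):
            return "demo-token"  # placeholder credential

    connector = LLMConnector(_DemoConfig())
    history = [{"role": "user", "content": "Hello!"}]
    answer = connector.call_spark("demo_project", "You are a helpful assistant.", history)
    # call_spark returns None on any failure, so the caller checks for it.
    print(answer if answer is not None else "Spark call failed; see logs.")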