ciyidogan committed on
Commit 6c67891 · verified · 1 Parent(s): 8b942e9

Delete llm_connector.py

Files changed (1)
  1. llm_connector.py +0 -50
llm_connector.py DELETED
@@ -1,50 +0,0 @@
-import os
-import requests
-from utils import log
-
-class LLMConnector:
-    def __init__(self, service_config):
-        self.service_config = service_config
-
-    def call_spark(self, project_name, prompt, chat_history):
-        project = self.service_config.get_project(project_name)
-        if not project:
-            raise Exception(f"Project not found: {project_name}")
-
-        llm_config = project.get("llm", {})
-        if not llm_config:
-            raise Exception(f"LLM config missing for project: {project_name}")
-
-        spark_url = self.service_config.llm_inference_service_url
-        work_mode = self.service_config.work_mode
-        cloud_token = self.service_config.get_auth_token()
-
-        headers = {
-            "Authorization": f"Bearer {cloud_token}",
-            "Content-Type": "application/json"
-        }
-
-        payload = {
-            "work_mode": work_mode,
-            "cloud_token": cloud_token,
-            "project_name": project_name,
-            "project_version": project.get("project_version"),
-            "repo_id": llm_config.get("repo_id"),
-            "generation_config": llm_config.get("generation_config"),
-            "use_fine_tune": llm_config.get("use_fine_tune"),
-            "fine_tune_zip": llm_config.get("fine_tune_zip"),
-            "user_input": chat_history[-1]["content"] if chat_history else "",
-            "system_prompt": prompt,
-            "context": chat_history
-        }
-
-        try:
-            log(f"🚀 Sending request to Spark for project: {project_name}")
-            response = requests.post(f"{spark_url}/generate", json=payload, headers=headers, timeout=60)
-            response.raise_for_status()
-            result = response.json()
-            log("✅ Spark response received successfully.")
-            return result.get("model_answer")
-        except Exception as e:
-            log(f"❌ Spark microservice error: {e}")
-            return None
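
For context, a minimal usage sketch of the class removed in this commit. It assumes a configuration object exposing get_project, llm_inference_service_url, work_mode, and get_auth_token, as the deleted file implies; the ServiceConfig stub and all values below are placeholders for illustration, not the project's real API.

# Hypothetical usage sketch of the deleted LLMConnector; ServiceConfig here is a
# stand-in for the project's real config object, and every value is a placeholder.
from llm_connector import LLMConnector  # as it existed before this commit

class ServiceConfig:
    """Minimal stub exposing only the attributes the deleted class read."""
    llm_inference_service_url = "http://localhost:7861"  # assumed Spark microservice URL
    work_mode = "cloud"  # assumed value

    def get_project(self, name):
        # Return a project record shaped the way call_spark expects.
        return {
            "project_version": 1,
            "llm": {
                "repo_id": "some-org/some-model",
                "generation_config": {},
                "use_fine_tune": False,
                "fine_tune_zip": "",
            },
        }

    def get_auth_token(self):
        return "dummy-token"

connector = LLMConnector(ServiceConfig())
answer = connector.call_spark(
    project_name="demo",
    prompt="You are a helpful assistant.",
    chat_history=[{"role": "user", "content": "Hello"}],
)
print(answer)  # None if the Spark microservice is unreachable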