ciyidogan commited on
Commit
092c3de
·
verified ·
1 Parent(s): d57cd71

Update llm_connector.py

Browse files
Files changed (1) hide show
  1. llm_connector.py +50 -51
llm_connector.py CHANGED
@@ -1,51 +1,50 @@
1
- import os
2
- import requests
3
- from log import log
4
-
5
-
6
class LLMConnector:
    """Thin client that forwards chat requests to the Spark inference microservice."""

    def __init__(self, service_config):
        # Configuration object providing project lookup, work mode, and auth tokens.
        self.service_config = service_config

    def call_spark(self, project_name, prompt, chat_history):
        """POST a generation request for *project_name* and return the model answer.

        Returns the "model_answer" field of the service's JSON response, or
        None when the HTTP call fails (the error is logged, not propagated).
        Raises Exception when the project or its "llm" config is missing.
        """
        cfg = self.service_config

        project = cfg.get_project(project_name)
        if not project:
            raise Exception(f"Project not found: {project_name}")

        llm_config = project.get("llm", {})
        if not llm_config:
            raise Exception(f"LLM config missing for project: {project_name}")

        spark_url = cfg.llm_inference_service_url
        token = cfg.get_auth_token()
        latest_user_message = chat_history[-1]["content"] if chat_history else ""

        request_body = {
            "work_mode": cfg.work_mode,
            "cloud_token": token,
            "project_name": project_name,
            "project_version": project.get("project_version"),
            "repo_id": llm_config.get("repo_id"),
            "generation_config": llm_config.get("generation_config"),
            "use_fine_tune": llm_config.get("use_fine_tune"),
            "fine_tune_zip": llm_config.get("fine_tune_zip"),
            "user_input": latest_user_message,
            "system_prompt": prompt,
            "context": chat_history,
        }

        try:
            log(f"πŸš€ Sending request to Spark for project: {project_name}")
            response = requests.post(
                f"{spark_url}/generate",
                json=request_body,
                headers={
                    "Authorization": f"Bearer {token}",
                    "Content-Type": "application/json",
                },
                timeout=60,
            )
            response.raise_for_status()
            result = response.json()
            log("βœ… Spark response received successfully.")
            return result.get("model_answer")
        except Exception as e:
            log(f"❌ Spark microservice error: {e}")
            return None
 
1
+ import os
2
+ import requests
3
+ from utils import log
4
+
5
class LLMConnector:
    """Bridges project configuration to the Spark LLM inference microservice."""

    def __init__(self, service_config):
        # Configuration object providing project lookup, work mode, the Spark
        # service URL, and auth-token retrieval.
        self.service_config = service_config

    def call_spark(self, project_name, prompt, chat_history):
        """Send a generation request to the Spark service for *project_name*.

        Args:
            project_name: Key used to look up the project in service_config.
            prompt: System prompt forwarded to the model.
            chat_history: List of message dicts; the last entry's "content" is
                treated as the user input ("" when the history is empty).

        Returns:
            The "model_answer" field of the service's JSON response, or None
            when the request fails for any reason (failures are logged, not
            raised, so callers can degrade gracefully).

        Raises:
            ValueError: If the project or its "llm" config section is missing.
        """
        project = self.service_config.get_project(project_name)
        if not project:
            # ValueError (an Exception subclass) is more precise than a bare
            # Exception and stays backward compatible for existing callers.
            raise ValueError(f"Project not found: {project_name}")

        llm_config = project.get("llm", {})
        if not llm_config:
            raise ValueError(f"LLM config missing for project: {project_name}")

        # Resolve configuration outside the try-block so misconfiguration
        # surfaces as an error rather than being swallowed as a request failure.
        spark_url = self.service_config.llm_inference_service_url
        cloud_token = self.service_config.get_auth_token()

        headers = {
            "Authorization": f"Bearer {cloud_token}",
            "Content-Type": "application/json",
        }
        payload = {
            "work_mode": self.service_config.work_mode,
            "cloud_token": cloud_token,
            "project_name": project_name,
            "project_version": project.get("project_version"),
            "repo_id": llm_config.get("repo_id"),
            "generation_config": llm_config.get("generation_config"),
            "use_fine_tune": llm_config.get("use_fine_tune"),
            "fine_tune_zip": llm_config.get("fine_tune_zip"),
            "user_input": chat_history[-1]["content"] if chat_history else "",
            "system_prompt": prompt,
            "context": chat_history,
        }

        try:
            log(f"πŸš€ Sending request to Spark for project: {project_name}")
            response = requests.post(
                f"{spark_url}/generate", json=payload, headers=headers, timeout=60
            )
            response.raise_for_status()
            result = response.json()
            log("βœ… Spark response received successfully.")
            return result.get("model_answer")
        except Exception as e:
            # Boundary catch: any failure (network, HTTP status, bad JSON) is
            # logged and reported as None instead of propagating.
            log(f"❌ Spark microservice error: {e}")
            return None