pvanand committed on
Commit
367cc0b
·
verified ·
1 Parent(s): d82c1af

Create helpers/ai_client.py

Browse files
Files changed (1) hide show
  1. helpers/ai_client.py +60 -0
helpers/ai_client.py ADDED
@@ -0,0 +1,60 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # helpers/ai_client.py
2
+ import requests
3
+ import os
4
+ from typing import Optional, Dict, Any
5
+
6
class AIClient:
    """Thin HTTP client for the external LLM agent API.

    Configuration is read from environment variables at construction time:
      - LLM_API_URL: endpoint URL (defaults to the public HF space URL).
      - API_KEY: default API key used when no per-call key is supplied.
    """

    def __init__(self):
        # Load environment variables, falling back to public demo defaults.
        self.llm_api_url = os.getenv("LLM_API_URL", "https://pvanand-audio-chat.hf.space/llm-agent")
        # NOTE(review): hardcoded fallback API key — acceptable for a demo
        # space, but should be removed (or made to fail fast) in production.
        self.api_key = os.getenv("API_KEY", "44d5c")

    def send_prompt(
        self,
        prompt: str,
        system_message: str = "",
        model_id: str = "openai/gpt-4o-mini",
        conversation_id: str = "string",
        user_id: str = "string",
        api_key: Optional[str] = None,
        timeout: float = 60.0
    ) -> Dict[str, Any]:
        """
        Sends a prompt to the LLM API and returns the JSON response.

        Args:
            prompt (str): The user's input prompt.
            system_message (str): Optional system message for the LLM.
            model_id (str): The model ID to use (default: "openai/gpt-4o-mini").
            conversation_id (str): Unique ID for the conversation.
            user_id (str): Unique ID for the user.
            api_key (Optional[str]): API key for authentication; falls back
                to the instance's default key when None.
            timeout (float): Per-request timeout in seconds (default: 60).

        Returns:
            Dict[str, Any]: The JSON response from the LLM API.

        Raises:
            Exception: If the API responds with a non-200 status code.
            requests.exceptions.RequestException: On network errors or timeout.
        """
        if api_key is None:
            api_key = self.api_key

        payload = {
            "prompt": prompt,
            "system_message": system_message,
            "model_id": model_id,
            "conversation_id": conversation_id,
            "user_id": user_id
        }

        headers = {
            "accept": "application/json",
            "X-API-Key": api_key,
            "Content-Type": "application/json"
        }

        # Timeout prevents the call from hanging indefinitely if the
        # upstream server accepts the connection but never responds.
        response = requests.post(self.llm_api_url, json=payload, headers=headers, timeout=timeout)
        if response.status_code != 200:
            raise Exception(f"Error from LLM API: {response.status_code} - {response.text}")

        return response.json()