Spaces:
Sleeping
Sleeping
File size: 4,026 Bytes
8ceaf47 3a47e80 8ceaf47 dee0a33 8ceaf47 dee0a33 8ceaf47 8f13578 3a47e80 8ceaf47 dee0a33 8ceaf47 dee0a33 8f13578 dee0a33 8ceaf47 8f13578 dee0a33 8ceaf47 dee0a33 8ceaf47 dee0a33 8f13578 dee0a33 8f13578 8ceaf47 dee0a33 8f13578 dee0a33 8ceaf47 dee0a33 8f13578 3a47e80 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 |
import os
from typing import Any, Dict, List, Optional, Generator
import requests
class LaasApiClient:
    """
    A client for interacting with the LAAS API.

    Credentials can be passed explicitly or picked up from the
    LAAS_API_KEY, LAAS_PROJECT, and LAAS_HASH environment variables.

    Usage:
        client = LaasApiClient()
        response = client.call_llm_preset(
            params={},
            model="your_model",
            messages=[],
            service_type="AZURE",
            max_tokens=0,
            function_call={},
            response_format={"type": "string"},
            source_count=0
        )
        print(response)
        # For chat completions:
        chat_response = client.chat_completions(messages=[{"role": "user", "content": "Hello!"}])
        print(chat_response)
        # For streaming:
        for chunk in client.stream(messages=[{"role": "user", "content": "Tell me a story"}]):
            print(chunk, end="", flush=True)
    """

    BASE_URL = "https://api-laas.wanted.co.kr"

    def __init__(
        self,
        base_url: str = BASE_URL,
        api_key: Optional[str] = None,
        project: Optional[str] = None,
        hash: Optional[str] = None,
        timeout: Optional[float] = None,
    ):
        """
        Initialize the client and validate credentials.

        Args:
            base_url: Root URL of the LAAS API service.
            api_key: API key; falls back to the LAAS_API_KEY env var.
            project: Project identifier; falls back to LAAS_PROJECT.
            hash: Preset hash; falls back to LAAS_HASH.
            timeout: Per-request timeout in seconds passed to ``requests``.
                ``None`` (the default) keeps the previous behavior of
                waiting indefinitely.

        Raises:
            ValueError: If the API key, project, or hash is missing after
                consulting the environment.
        """
        self.base_url = base_url
        self.hash = hash or os.environ.get("LAAS_HASH")
        # None means "no timeout", matching requests' own default.
        self.timeout = timeout
        self.headers = {
            "Content-Type": "application/json",
            "apiKey": api_key or os.environ.get("LAAS_API_KEY"),
            "project": project or os.environ.get("LAAS_PROJECT"),
        }
        if not self.headers["apiKey"]:
            raise ValueError("API key is required to use the LAAS API.")
        if not self.headers["project"]:
            raise ValueError("Project is required to use the LAAS API.")
        if not self.hash:
            raise ValueError("Hash is required to use the LAAS API.")

    def _make_api_call(self, endpoint: str, payload: Dict[str, Any]) -> Dict[str, Any]:
        """Make an API call to the specified endpoint with the given payload.

        The preset hash is merged into the payload; explicit payload keys win.

        Raises:
            requests.HTTPError: On a non-2xx response.
        """
        url = f"{self.base_url}/api/{endpoint}"
        response = requests.post(
            url,
            headers=self.headers,
            json={"hash": self.hash, **payload},
            timeout=self.timeout,
        )
        response.raise_for_status()  # Raise an exception for HTTP errors
        return response.json()

    def call_llm_preset(self, complete: bool = False, **kwargs) -> Dict[str, Any]:
        """
        Call the LLM preset API.

        Args:
            complete (bool): Whether to use the 'complete' endpoint.
            **kwargs: Keyword arguments for the API call.

        Returns:
            Dict[str, Any]: The JSON response from the API.
        """
        endpoint = "preset/complete" if complete else "preset"
        return self._make_api_call(endpoint, kwargs)

    def chat_completions(
        self, messages: List[Dict[str, str]], **kwargs
    ) -> Dict[str, Any]:
        """
        Call the chat completions API.

        Args:
            messages (List[Dict[str, str]]): List of message dictionaries.
            **kwargs: Additional keyword arguments for the API call.

        Returns:
            Dict[str, Any]: The JSON response from the API.
        """
        payload = {
            "messages": messages,
            **kwargs,
        }
        return self._make_api_call("preset/chat/completions", payload)

    def stream(
        self, messages: List[Dict[str, str]], **kwargs
    ) -> Generator[str, None, None]:
        """
        Stream the chat completions API response.

        Args:
            messages (List[Dict[str, str]]): List of message dictionaries.
            **kwargs: Additional keyword arguments for the API call.

        Yields:
            str: Chunks of the streaming response (raw decoded lines;
                empty keep-alive lines are skipped).

        Raises:
            requests.HTTPError: On a non-2xx response.
        """
        url = f"{self.base_url}/api/preset/chat/completions"
        payload = {
            "hash": self.hash,
            "messages": messages,
            "stream": True,
            **kwargs,
        }
        # Use a context manager so the connection is released even if the
        # consumer abandons the generator mid-stream.
        with requests.post(
            url, headers=self.headers, json=payload, stream=True, timeout=self.timeout
        ) as response:
            response.raise_for_status()
            for line in response.iter_lines():
                if line:
                    yield line.decode("utf-8")
|