File size: 2,470 Bytes
8550cf4
 
127e0c7
8550cf4
127e0c7
8550cf4
127e0c7
8550cf4
127e0c7
 
8550cf4
285a245
8550cf4
 
 
 
 
285a245
8550cf4
 
 
285a245
8550cf4
 
285a245
8550cf4
 
285a245
8550cf4
 
 
 
 
 
 
 
 
 
 
 
285a245
8550cf4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
285a245
8550cf4
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
import os
from openai import OpenAI
from dotenv import load_dotenv
from typing import List, Optional

# Load environment variables from a local .env file (must run before any
# os.getenv() call below so TOKEN is visible).
load_dotenv()

app = FastAPI(title="Debyez Chatbot API")

# Initialize OpenAI client
# The OpenAI SDK is pointed at the Hugging Face serverless inference endpoint,
# which speaks the OpenAI-compatible /v1 chat API.
# NOTE(review): TOKEN is presumably a Hugging Face access token — confirm the
# env var name against the deployment config; no error is raised here if unset.
client = OpenAI(
    base_url="https://api-inference.huggingface.co/v1",
    api_key=os.getenv("TOKEN")
)

class Message(BaseModel):
    """One chat turn in OpenAI chat-completions format."""

    role: str  # forwarded verbatim to the model API (e.g. "user", "assistant")
    content: str  # message text

class ChatRequest(BaseModel):
    """Request body for POST /chat: the full conversation history."""

    messages: List[Message]  # ordered oldest-to-newest

class ChatResponse(BaseModel):
    """Response body for POST /chat: the assistant's reply text."""

    response: str  # content of the first completion choice

def get_debyez_prompt_template(customer_message: str) -> str:
    """Embed a customer message in the Debyez assistant persona prompt.

    Args:
        customer_message: Raw text of the customer's latest message.

    Returns:
        The full prompt string instructing the model to answer as the
        Debyez virtual assistant, with the customer message quoted inline.
    """
    # FIX: the original prompt text was garbled and self-duplicating
    # ("AI technology service specializing in Generative AI solutions ...
    # with services Generative AI Consulting ..."), which confuses the model.
    # This version states the same persona, services, and rules coherently.
    return f"""
    You are a friendly and helpful virtual assistant for Debyez, a company
    specializing in cutting-edge Generative AI solutions. Debyez partners with
    businesses to integrate AI technologies, fostering innovation and
    competitive advantage. Its services include Generative AI consulting,
    AI chatbot development, custom LLM development, and ChatGPT integration.
    Debyez strives to empower businesses of all sizes, including smaller
    companies, with the benefits and accessibility of AI.
    Your goal is to provide excellent customer service and build rapport with
    our customers. Be knowledgeable about our products, services, and policies.
    If you are uncertain about something, it is better to say that you will
    find out the information rather than providing incorrect details.
    Here's the latest message from the customer: '{customer_message}'
    Respond in a way that is warm, professional, and relevant to the customer's needs.
    """

@app.post("/chat", response_model=ChatResponse)
async def chat_endpoint(request: ChatRequest):
    """Generate an assistant reply for the supplied conversation history.

    Args:
        request: The conversation so far, as role/content messages.

    Returns:
        ChatResponse containing the model's reply text.

    Raises:
        HTTPException: 400 if `messages` is empty; 500 on any model or
            upstream failure.
    """
    # Reject an empty conversation up front instead of forwarding an empty
    # message list to the model.
    if not request.messages:
        raise HTTPException(status_code=400, detail="messages must not be empty")
    try:
        # BUG FIX: the persona template was previously wrapped around EVERY
        # message — including assistant-role replies — so the model saw its
        # own past answers re-framed as "the latest message from the
        # customer". Only user messages get the template now.
        formatted_messages = [
            {
                "role": msg.role,
                "content": (
                    get_debyez_prompt_template(msg.content)
                    if msg.role == "user"
                    else msg.content
                ),
            }
            for msg in request.messages
        ]

        # Create chat completion
        completion = client.chat.completions.create(
            model="meta-llama/Meta-Llama-3-8B-Instruct",
            messages=formatted_messages,
            temperature=0.5,
            max_tokens=3000,
        )

        return ChatResponse(response=completion.choices[0].message.content)
    except HTTPException:
        # Never re-wrap deliberate HTTP errors as generic 500s.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

@app.get("/health")
async def health_check():
    """Liveness probe: always reports the service as healthy."""
    payload = {"status": "healthy"}
    return payload