File size: 1,302 Bytes
2733058
 
d398e2e
 
2733058
 
d398e2e
2733058
 
d398e2e
2733058
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
from fastapi import FastAPI, Request
from pydantic import BaseModel
from huggingface_hub import InferenceClient

# Initialize FastAPI app
app = FastAPI()

# Initialize Hugging Face Inference Client
# NOTE(review): instantiated at import time; model id is hard-coded.
# Requests will hit the HF Inference API — presumably auth comes from the
# HF_TOKEN environment variable or is anonymous; confirm deployment config.
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")

# Define expected input format
class InputData(BaseModel):
    """Request body schema for the /api endpoint."""

    message: str  # User message sent from the frontend

# Define the API endpoint
@app.post("/api")
async def get_ai_response(data: InputData):
    """Forward the user's message to the zephyr-7b-beta chat model and
    return the complete generated reply.

    Body: ``{"message": "<user text>"}`` (validated by ``InputData``).
    Returns: ``{"response": "<model reply>"}`` on success, or
    ``{"error": "<description>"}`` on any failure (still HTTP 200).
    """
    try:
        # Extract the user message from the request body
        user_message = data.message

        # Prepare messages for the model
        messages = [
            {"role": "system", "content": "You are a friendly Chatbot."},
            {"role": "user", "content": user_message},
        ]

        # Stream the completion and accumulate tokens. Collect into a
        # list and join once at the end instead of quadratic str +=.
        chunks = []
        for message in client.chat_completion(
            messages,
            max_tokens=512,
            stream=True,
            temperature=0.7,
            top_p=0.95,
        ):
            token = message.choices[0].delta.content
            # The first/last streamed delta may carry no content (None);
            # the original `response += token` would raise TypeError there.
            if token:
                chunks.append(token)

        # Return the AI response as JSON
        return {"response": "".join(chunks).strip()}
    except Exception as e:
        # Handle errors gracefully. NOTE(review): errors are returned
        # with HTTP 200 in a {"error": ...} body — callers may rely on
        # this shape, so it is preserved; consider HTTPException later.
        return {"error": str(e)}