from fastapi import FastAPI
from pydantic import BaseModel
from groq import Groq
import os
import logging

# from dotenv import load_dotenv
# load_dotenv()

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("uvicorn")

# Initialize the FastAPI app
app = FastAPI()

# Get the API key from an environment variable
groq_api_key = os.getenv("GROQ_API_KEY")
if not groq_api_key:
    raise ValueError("GROQ_API_KEY environment variable is not set")

# Initialize the Groq client
client = Groq(api_key=groq_api_key)

# Define the system message
SYSTEM_MSG = '''- Act as an experienced blockchain developer with 15 years of experience.
- Help me understand blockchain concepts; assume I am a complete beginner.
- If the user asks anything not related to blockchain, just say you don't know about it.'''

# Request model for FastAPI
class ChatRequest(BaseModel):
    message: str
    history: list
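
# For reference, a request body matching this model looks roughly like the
# following (illustrative values, not taken from the original file); each
# history entry is a [user_message, assistant_reply] pair:
# {
#     "message": "What is a blockchain?",
#     "history": [["Hi", "Hello! Ask me anything about blockchain."]]
# }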

# Root endpoint to confirm the API is running (route path assumed)
@app.get("/")
def read_root():
    return {"message": "FastAPI is running!"}

# FastAPI chat endpoint (POST route path "/chat" assumed)
@app.post("/chat")
def chat(request: ChatRequest):
    logger.info(f"Received request: {request}")
    message = request.message
    history = request.history
    # Create the history_list to send to the Groq API, starting with the system prompt
    history_list = [{"role": "system", "content": SYSTEM_MSG}]
    for human, ai in history:
        history_list.append({"role": "user", "content": human})
        history_list.append({"role": "assistant", "content": ai})

    # Append the new user message to the history
    history_list.append({"role": "user", "content": message})
    # Try to get a response from the Groq API
    try:
        response = client.chat.completions.create(
            model="llama-3.1-70b-versatile",  # Ensure this matches a model currently offered by Groq
            messages=history_list,
            temperature=1.0,
            max_tokens=4000,
            stream=False  # Streaming disabled; the full response is returned at once
        )
        final_message = response.choices[0].message.content

        # Return the final AI-generated message
        return {"response": final_message}
    except Exception as e:
        logger.error(f"Groq API call failed: {e}")
        return {"response": f"Error: {str(e)}"}
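
# A minimal sketch for running the app locally, assuming uvicorn is installed.
# Port 7860 is the port Hugging Face Spaces typically expects; adjust as needed
# for other hosts.
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=7860)

# Illustrative request against the chat endpoint (the "/chat" path is assumed above):
# curl -X POST http://localhost:7860/chat \
#     -H "Content-Type: application/json" \
#     -d '{"message": "What is a blockchain?", "history": []}'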