from fastapi import FastAPI, HTTPException, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from pydantic import BaseModel
import os
import json
from dotenv import load_dotenv
from mistralai import Mistral
from prompts.instruction_prompts import instruction_prompt
from prompts.game_rules import game_rules
from prompts.hints import hints
from prompts.triggers import triggers
from helper_functions import load_chat_history, save_chat_history, update_chat_history
from utils import model, trump_character, client
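
# The local modules imported above are not shown in this file; the sketch below
# is one plausible shape for them (an assumption, the actual contents may differ):
#
#   utils.py
#       import os
#       from dotenv import load_dotenv
#       from mistralai import Mistral
#
#       load_dotenv()                                   # reads MISTRAL_API_KEY from .env
#       model = "mistral-large-latest"                  # assumed model id
#       client = Mistral(api_key=os.environ["MISTRAL_API_KEY"])
#       trump_character = "..."                         # persona description string
#
#   helper_functions.py
#       load_chat_history()        -> returns the stored history (assumed JSON-backed)
#       update_chat_history(...)   -> appends a user or character turn and returns the history
#       save_chat_history(history) -> persists the history to disk
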
app = FastAPI()
# Add CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


class Message(BaseModel):
    message: str
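
# Requests to /api/generate-text are expected to carry a JSON body matching the
# model above, e.g. {"message": "Hello"} (illustrative payload).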


def generate_text(message: Message):
    # Load existing chat history
    chat_history = load_chat_history()

    # Add user message to history
    chat_history = update_chat_history(chat_history, user_message=message.message)

    # Format the prompt
    formatted_prompt = instruction_prompt.format(
        hints=hints,
        chat_history=chat_history,
        character=trump_character,
        rules=game_rules,
        triggers=triggers,
    )
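
    # instruction_prompt is assumed to be a template string containing
    # {hints}, {chat_history}, {character}, {rules} and {triggers} placeholders,
    # matching the keyword arguments passed to .format() above.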

    # Get Character's response
    chat_response = client.chat.complete(
        model=model,
        messages=[
            {
                "role": "system",
                "content": formatted_prompt,
            },
            {
                "role": "user",
                "content": message.message,
            },
        ],
    )
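    # The SDK returns an OpenAI-style completion object; the generated text is
    # found at choices[0].message.content.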
    clean_response = chat_response.choices[0].message.content

    # Add character response to history
    chat_history = update_chat_history(chat_history, character_response=clean_response)

    # Save updated chat history
    save_chat_history(chat_history)

    return {
        "character_response": clean_response,
        "chat_history": chat_history,
    }


@app.post("/api/generate-text")
async def inference(message: Message):
    return generate_text(message=message)
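

# Illustrative call to the endpoint above (the host and port depend on how the
# app is deployed; a local run is assumed here):
#
#   curl -X POST http://localhost:8000/api/generate-text \
#        -H "Content-Type: application/json" \
#        -d '{"message": "Hello"}'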


@app.get("/chat-history", tags=["History"])
def get_chat_history(request: Request):
    chat_history = load_chat_history()
    return {"chat_history": chat_history}
# Mount static files AFTER defining API routes
app.mount("/", StaticFiles(directory="static", html=True), name="static")
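

# Optional local-development entrypoint (a sketch; on Hugging Face Spaces the
# app may be started by the Space's own launch command instead). Port 7860 is
# the Spaces default; adjust as needed.
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=7860)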