# purchasing_api/app.py
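#
# FastAPI app that exposes a LangChain prompt | CustomLLM chain
# (Qwen/Qwen-VL-Chat, authenticated with an HF_API_KEY token) as a small web service.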
from fastapi import FastAPI
import os
from custom_llm import CustomLLM
from pydantic import BaseModel
from langchain.prompts import PromptTemplate
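
# Request body for POST /conversation; `tenant` and `module` are optional
# metadata fields and are not used by the handler below.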
class ConversationPost(BaseModel):
    tenant: str | None = None
    module: str | None = None
    question: str
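
# The Hugging Face API token is read from the HF_API_KEY environment variable;
# the app fails at startup if it is missing.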
API_TOKEN = os.environ['HF_API_KEY']
app = FastAPI()
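
# ChatML-style prompt for Qwen chat models. The system message is in Indonesian:
# "You are an AI assistant developed by Jonathan Jordan. Answer the user's
# question strictly in Indonesian."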
prompt = PromptTemplate.from_template("""<|im_start|>system
Kamu adalah Asisten AI yang dikembangkan oleh Jonathan Jordan. Jawablah pertanyaan user secara ketat dalam Bahasa Indonesia<|im_end|>
<|im_start|>user
{question}<|im_end|>
<|im_start|>assistant
""")
llm = prompt | CustomLLM(
    repo_id="Qwen/Qwen-VL-Chat",
    model_type='text-generation',
    api_token=API_TOKEN,
    max_new_tokens=150,
).bind(stop=['<|im_end|>'])
@app.get("/")
def greet_json():
return {"Hello": "World!"}
@app.post("/conversation")
async def conversation(data: ConversationPost):
    return {"output": llm.invoke({"question": data.question})}