Guhanselvam committed on
Commit
b121316
·
verified ·
1 Parent(s): d8cbf26

Update gemini_fastapi_server.py

Browse files
Files changed (1) hide show
  1. gemini_fastapi_server.py +2 -0
gemini_fastapi_server.py CHANGED
@@ -24,6 +24,8 @@ llama_model = HuggingFaceChat(model="meta-llama/LLaMA-3-2", token=hf_token) # U
24
  qna_prompt = ChatPromptTemplate.from_template("Answer the question: {question}")
25
 
26
  # Add a route for the QnA API using the LLaMA model
 
 
27
  add_routes(
28
  app,
29
  qna_prompt | llama_model,
 
24
  qna_prompt = ChatPromptTemplate.from_template("Answer the question: {question}")
25
 
26
  # Add a route for the QnA API using the LLaMA model
27
+ @app.post("/llm_api")
28
+ async def qna_endpoint(question: str):
29
  add_routes(
30
  app,
31
  qna_prompt | llama_model,