pvanand committed on
Commit
0f54295
1 Parent(s): 315723f

Update main.py

Files changed (1)
  1. main.py +0 -47
main.py CHANGED
@@ -232,53 +232,6 @@ async def chat(request: ChatRequest, background_tasks: BackgroundTasks, api_key:
 
 
 
-@app.post("/digiyatra-chat/", response_class=StreamingResponse, tags=["Chat"])
-async def chat(request: ChatRequest, background_tasks: BackgroundTasks, api_key: str = Depends(get_api_key)):
-    try:
-        document_list = load_embeddings(request.index_id)
-        search_results = embeddings.search(request.query, 6)
-        context = "\n".join([document_list[idx[0]] for idx in search_results])
-
-        rag_prompt = f"Based on the following context, please answer the user's question:\n\nContext:\n{context}\n\nUser's question: {request.query}\n\nAnswer:"
-        system_prompt = "You are a helpful assistant tasked with providing answers using the context provided"
-
-        conversation_id = request.conversation_id or str(uuid.uuid4())
-
-        if request.enable_followup:
-            llm_request = {
-                "query": rag_prompt,
-                "model_id": 'openai/gpt-4o-mini',
-                "conversation_id": conversation_id,
-                "user_id": request.user_id
-            }
-            endpoint_url = "https://pvanand-general-chat.hf.space/digiyatra-followup"
-        else:
-            llm_request = {
-                "prompt": rag_prompt,
-                "system_message": system_prompt,
-                "model_id": request.model_id,
-                "conversation_id": conversation_id,
-                "user_id": request.user_id
-            }
-            endpoint_url = "https://pvanand-audio-chat.hf.space/llm-agent"
-
-        logger.info(f"Starting chat response generation for user: {request.user_id} Full request: {llm_request}")
-
-        def response_generator():
-            full_response = ""
-            for chunk in stream_llm_request(api_key, llm_request, endpoint_url):
-                full_response += chunk
-                yield chunk
-            logger.info(f"Finished chat response generation for user: {request.user_id} Full response: {full_response}")
-
-        return StreamingResponse(response_generator(), media_type="text/event-stream")
-
-    except Exception as e:
-        logger.error(f"Error in chat endpoint: {str(e)}")
-        raise HTTPException(status_code=500, detail=f"Error in chat endpoint: {str(e)}")
-
-
-
 @app.on_event("startup")
 async def startup_event():
     check_and_index_csv_files()
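For reference, a minimal client sketch of how the removed /digiyatra-chat/ endpoint could have been called before this commit. The base URL and the "X-API-Key" header name are assumptions (the diff only shows the route was guarded by Depends(get_api_key)); the payload keys mirror the ChatRequest attributes referenced in the removed handler.

# Hedged sketch (not part of the commit): calling the removed streaming endpoint.
import requests

BASE_URL = "https://your-space.hf.space"  # assumption: the real host is not shown in the diff

payload = {
    "query": "How do I enrol in DigiYatra?",
    "index_id": "digiyatra-faq",       # passed to load_embeddings() in the handler
    "model_id": "openai/gpt-4o-mini",  # used on the non-followup branch
    "user_id": "demo-user",
    "enable_followup": False,          # False routed the request to the /llm-agent backend
    # conversation_id omitted: the handler fell back to str(uuid.uuid4())
    # (assumes the field is optional on ChatRequest).
}

with requests.post(
    f"{BASE_URL}/digiyatra-chat/",
    json=payload,
    headers={"X-API-Key": "YOUR_API_KEY"},  # assumption: header name enforced by get_api_key()
    stream=True,
) as resp:
    resp.raise_for_status()
    # The endpoint streamed text/event-stream chunks produced by response_generator().
    for chunk in resp.iter_content(chunk_size=None, decode_unicode=True):
        print(chunk, end="", flush=True)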