Haseeb-001 committed on
Commit
92abdfd
·
verified ·
1 Parent(s): ef6f24f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +104 -0
app.py CHANGED
@@ -0,0 +1,104 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import FastAPI, Request, HTTPException
2
+ from fastapi.responses import JSONResponse
3
+ from starlette.middleware.cors import CORSMiddleware
4
+ import rasa.core.interpreter
5
+ from rasa.core.agent import Agent
6
+ import asyncio
7
+ import json
8
+ import os
9
+ from dotenv import load_dotenv
10
+ import streamlit as st
11
+ import requests
12
load_dotenv()

app = FastAPI()

# CORS: let the Streamlit frontend talk to this FastAPI backend.
# NOTE(review): wildcard origins are fine for development only — restrict
# `allow_origins` before deploying to production.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Model/domain locations, overridable via environment variables.
RASA_MODEL_PATH = os.getenv("RASA_MODEL_PATH", "./models")
DOMAIN_PATH = os.getenv("DOMAIN_PATH", "./domain.yml")
NLU_MODEL_PATH = os.getenv("NLU_MODEL_PATH")  # optional; None when unset
# Load Rasa agent
async def load_rasa_agent():
    """Build and return the Rasa agent used by the chat endpoint.

    A separate NLU interpreter is created only when NLU_MODEL_PATH is
    configured; otherwise `interpreter` stays None and Agent.load uses
    whatever the model bundle provides.
    """
    if NLU_MODEL_PATH:
        interpreter = await rasa.core.interpreter.create_interpreter(NLU_MODEL_PATH)
    else:
        interpreter = None

    return Agent.load(
        RASA_MODEL_PATH,
        interpreter=interpreter,
        domain=DOMAIN_PATH,
    )
# Load the Rasa agent once at import time so request handlers can reuse it.
# Calling asyncio.get_event_loop() with no running loop is deprecated and
# raises RuntimeError on newer Python versions, so fall back to creating a
# loop explicitly when one does not exist yet.
try:
    loop = asyncio.get_event_loop()
except RuntimeError:
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
rasa_agent = loop.run_until_complete(load_rasa_agent())
# FastAPI endpoint for chat
@app.post("/chat")
async def chat_endpoint(message_data: dict):
    """Rasa chatbot endpoint: accept a JSON message, return the bot replies.

    Expects a body like {"message": "..."} and answers with
    {"responses": [...]} containing only the bot's text messages.
    """
    user_message = message_data.get('message')
    if not user_message:
        raise HTTPException(status_code=400, detail="Message text not found")

    events = await rasa_agent.handle_text(user_message)

    # Keep only the plain-text replies from the bot's response events.
    bot_responses = [event["text"] for event in events if "text" in event]

    return JSONResponse({"responses": bot_responses})
@app.get("/health")
async def health_check():
    """Liveness probe used by monitoring: reports that the service is up."""
    payload = {"status": "ok"}
    return JSONResponse(payload)
# Streamlit UI
def main():
    """Render the Streamlit chat page and proxy user messages to the backend.

    Sends the typed message to the local FastAPI /chat endpoint and shows
    each bot reply in its own text area.
    """
    st.title("HealthVoice Bot")
    st.write("Talk to the bot about your symptoms.")

    user_input = st.text_input("Your message:", "")

    if user_input:
        api_url = "http://localhost:8000/chat"  # FastAPI endpoint URL
        message_payload = {"message": user_input}

        try:
            # Timeout prevents the UI from hanging forever if the backend
            # is down or unresponsive; RequestException below covers it.
            response = requests.post(api_url, json=message_payload, timeout=30)
            response.raise_for_status()  # Raise HTTPError for bad responses (4xx or 5xx)
            bot_messages = response.json().get("responses", [])
            for i, bot_message in enumerate(bot_messages):
                # A unique `key` per widget is required: repeated st.text_area
                # calls with the identical label would otherwise raise a
                # duplicate-widget-ID error when the bot sends several replies.
                # Height scales with line count but never drops below
                # Streamlit's minimum text_area height.
                st.text_area(
                    "Bot Response:",
                    value=bot_message,
                    height=max(68, len(bot_message.split('\n')) * 25),
                    key=f"bot_response_{i}",
                )
        except requests.exceptions.RequestException as e:
            st.error(f"Error communicating with the chatbot backend: {e}")
if __name__ == "__main__":
    import uvicorn
    import threading

    # Serve the FastAPI app from a background daemon thread so the
    # Streamlit UI can own the main thread. Daemon threads are stopped
    # abruptly when the process exits.
    def run_fastapi():
        uvicorn.run(app, host="0.0.0.0", port=8000)

    api_thread = threading.Thread(target=run_fastapi, daemon=True)
    api_thread.start()

    # NOTE(review): running the Streamlit UI this way assumes the script is
    # launched so that Streamlit picks up these calls — confirm the intended
    # launch command for this app.
    main()  # Run Streamlit app in the main thread