SageWisp committed on
Commit 22e0324 · 1 Parent(s): 9403b9b

add flask files

.gitignore ADDED
@@ -0,0 +1,70 @@
+ # Byte-compiled / optimized / DLL files
+ __pycache__/
+ *.py[cod]
+ *$py.class
+
+ # C extensions
+ *.so
+
+ # Distribution / packaging
+ .Python
+ env/
+ build/
+ develop-eggs/
+ dist/
+ downloads/
+ eggs/
+ .eggs/
+ lib/
+ lib64/
+ parts/
+ sdist/
+ var/
+ *.egg-info/
+ .installed.cfg
+ *.egg
+
+ # PyInstaller
+ # Usually these files are written by a python script from a template
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
+ *.manifest
+ *.spec
+
+ # Installer logs
+ pip-log.txt
+ pip-delete-this-directory.txt
+
+ # Unit test / coverage reports
+ htmlcov/
+ .tox/
+ .nox/
+ .coverage
+ .coverage.*
+ .cache
+ nosetests.xml
+ coverage.xml
+ *.cover
+ .hypothesis/
+ .pytest_cache/
+
+ # Jupyter Notebook
+ .ipynb_checkpoints
+
+ # pyenv
+ .python-version
+
+ # mypy
+ .mypy_cache/
+ .dmypy.json
+
+ # Pyre type checker
+ .pyre/
+
+ # dotenv environment variables file
+ .env
+
+ # VS Code
+ .vscode/
+
+ # Exclude all PDF files
+ *.pdf
Dockerfile ADDED
@@ -0,0 +1,33 @@
+ # Use Python 3.11 as the base image
+ FROM python:3.11-slim
+
+ # Set working directory
+ WORKDIR /app
+
+ # Install system dependencies
+ RUN apt-get update && apt-get install -y \
+     build-essential \
+     && rm -rf /var/lib/apt/lists/*
+
+ # Copy requirements first to leverage Docker cache
+ COPY requirements.txt .
+
+ # Install Python dependencies
+ RUN pip install --no-cache-dir -r requirements.txt
+
+ # Copy the rest of the application
+ COPY . .
+
+ # Create necessary directories if they don't exist
+ RUN mkdir -p private Knowledge_base templates
+
+ # Expose the port the app runs on
+ EXPOSE 7860
+
+ # Set environment variables
+ ENV FLASK_APP=app.py
+ ENV FLASK_ENV=production
+ ENV FLASK_RUN_PORT=7860
+
+ # Command to run the application
+ CMD ["flask", "run", "--host=0.0.0.0", "--port=7860"]
Knowledge_base/faiss_index.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:460e6632415b4d967dc7e2e45005fd701fff3cc5516b926d2d9e2329278ed3b1
+ size 391725
Knowledge_base/metadata.pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:16702041923f94f9b6536e771e1b53a8f802966d31709713280c38ee0e7a6aaa
+ size 251917
RAG.py ADDED
@@ -0,0 +1,122 @@
+ import os
+ import warnings
+ import pickle
+ from together import Together
+ import faiss
+ from sentence_transformers import SentenceTransformer
+ from PyPDF2 import PdfReader
+ import glob
+
+ warnings.filterwarnings("ignore")
+
+ # Hardcoded Together API Key
+ TOGETHER_API_KEY = "81da53aa3044c7ebead342fb048f016a4e593a86928a783a6fdcc1e3883054e4"
+ client = Together(api_key=TOGETHER_API_KEY)
+
+ # Initialize embedding model
+ embedding_model = SentenceTransformer(
+     "sentence-transformers/all-MiniLM-L6-v2",
+     use_auth_token=os.environ.get("HUGGINGFACE_HUB_TOKEN"),
+ )
+
+ # Global variables for storing embeddings and documents
+ documents = []
+ filenames = []
+ index = None
+
+ def load_index():
+     """Load the FAISS index and document metadata."""
+     global index, documents, filenames
+
+     if not os.path.exists("Knowledge_base/faiss_index.bin") or not os.path.exists("Knowledge_base/metadata.pkl"):
+         raise ValueError("Index files not found. Please run preprocess.py first!")
+
+     print("Loading index and metadata...")
+     index = faiss.read_index("Knowledge_base/faiss_index.bin")
+
+     with open("Knowledge_base/metadata.pkl", "rb") as f:
+         metadata = pickle.load(f)
+         documents = metadata["documents"]
+         filenames = metadata["filenames"]
+
+     print("Index and metadata loaded successfully!")
+
+ def answer_question(query: str) -> str:
+     """
+     Answer a question using the RAG system.
+
+     Args:
+         query (str): The user's question
+
+     Returns:
+         str: The generated answer
+     """
+     global index
+
+     # Load index if not already loaded
+     if index is None:
+         load_index()
+
+     # Get query embedding
+     query_embedding = embedding_model.encode([query])
+     faiss.normalize_L2(query_embedding)
+
+     # Get top similar documents
+     scores, indices = index.search(query_embedding, min(3, len(documents)))
+
+     # Build context from retrieved documents
+     context_parts = []
+     relevant_docs = []
+
+     for score, idx in zip(scores[0], indices[0]):
+         if idx < len(documents):
+             doc_info = {
+                 "content": documents[idx],
+                 "filename": filenames[idx],
+                 "score": float(score),
+             }
+             relevant_docs.append(doc_info)
+             context_parts.append(f"[{doc_info['filename']}]\n{doc_info['content']}")
+
+     if not relevant_docs:
+         return "I couldn't find any relevant information to answer your question."
+
+     # Combine context
+     context = "\n\n".join(context_parts)
+
+     # Create prompt for LLM
+     llm_prompt = f"""Answer the question based on the provided context documents.
+
+ Context:
+ {context}
+
+ Question: {query}
+
+ Instructions:
+ - Answer based only on the information in the context
+ - If the context doesn't contain enough information, say so
+ - Mention which document(s) you're referencing
+ - Start with "According to [document name]"
+ - Put the document name in brackets
+
+ Answer:"""
+
+     try:
+         # Generate answer using Together AI
+         response = client.chat.completions.create(
+             model="meta-llama/Llama-3.3-70B-Instruct-Turbo",
+             messages=[{"role": "user", "content": llm_prompt}],
+             max_tokens=500,
+             temperature=0.7,
+         )
+         answer = response.choices[0].message.content
+
+         # Add source information
+         sources_list = [doc["filename"] for doc in relevant_docs]
+         sources_text = sources_list[0]
+         full_answer = f"{answer}\n\n📄 Source Used: {sources_text}"
+
+         return full_answer
+
+     except Exception as e:
+         return f"Error generating answer: {str(e)}"
app.py ADDED
@@ -0,0 +1,85 @@
+ from flask import Flask, render_template, request, jsonify
+ from dotenv import load_dotenv
+ import os
+ import json
+ import httpx
+
+ # Explicitly load the .env file from the script's directory
+ # dotenv_path = os.path.join(os.path.dirname(__file__), '.env')
+ # load_dotenv(dotenv_path=dotenv_path)
+
+ from RAG import answer_question
+
+ app = Flask(__name__)
+
+ # Load Surge configuration
+ try:
+     with open("private/surge_config.json", "r") as f:
+         surge_config = json.load(f)
+ except FileNotFoundError:
+     print("Warning: surge_config.json not found. SMS functionality will be disabled.")
+     surge_config = None
+
+ @app.route('/')
+ def index():
+     return render_template('index.html')
+
+ @app.route('/chat', methods=['POST'])
+ def chat():
+     try:
+         data = request.get_json()
+         user_question = data.get('question', '')
+
+         if not user_question:
+             return jsonify({'error': 'No question provided'}), 400
+
+         # Get answer from RAG system
+         answer = answer_question(user_question)
+
+         return jsonify({
+             'answer': answer
+         })
+
+     except Exception as e:
+         return jsonify({'error': str(e)}), 500
+
+ @app.route('/send-sms', methods=['POST'])
+ def send_sms():
+     if not surge_config:
+         return jsonify({'error': 'SMS functionality not configured'}), 503
+
+     try:
+         data = request.get_json()
+         message_content = data.get('message', '')
+
+         if not message_content:
+             return jsonify({'error': 'No message provided'}), 400
+
+         # Send SMS via Surge API
+         with httpx.Client() as client:
+             response = client.post(
+                 "https://api.surgemsg.com/messages",
+                 headers={
+                     "Authorization": f"Bearer {surge_config['api_key']}",
+                     "Surge-Account": surge_config["account_id"],
+                     "Content-Type": "application/json",
+                 },
+                 json={
+                     "body": message_content,
+                     "conversation": {
+                         "contact": {
+                             "first_name": surge_config["my_first_name"],
+                             "last_name": surge_config["my_last_name"],
+                             "phone_number": surge_config["my_phone_number"],
+                         }
+                     },
+                 },
+             )
+             response.raise_for_status()
+             return jsonify({'status': 'Message sent successfully'})
+
+     except Exception as e:
+         return jsonify({'error': str(e)}), 500
+
+ if __name__ == '__main__':
+     app.run(debug=True)
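As a rough client-side sketch (assuming the server was started with python app.py, so it listens on Flask's default port 5000; the Docker image uses 7860 instead), the /chat route can be exercised with httpx, which is already pinned in requirements.txt:

    import httpx

    # Post a question to the /chat endpoint and print the generated answer
    resp = httpx.post(
        "http://127.0.0.1:5000/chat",
        json={"question": "How do you dose amoxicillin in CKD?"},
        timeout=120.0,
    )
    resp.raise_for_status()
    print(resp.json()["answer"])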
preprocess.py ADDED
@@ -0,0 +1,96 @@
+ import os
+ import warnings
+ import pickle
+ import faiss
+ from sentence_transformers import SentenceTransformer
+ from PyPDF2 import PdfReader
+ import glob
+ from together import Together
+
+ warnings.filterwarnings("ignore")
+
+ # Hardcoded Together API Key
+ TOGETHER_API_KEY = "81da53aa3044c7ebead342fb048f016a4e593a86928a783a6fdcc1e3883054e4"
+ client = Together(api_key=TOGETHER_API_KEY)
+
+ # Initialize embedding model
+ embedding_model = SentenceTransformer(
+     "sentence-transformers/all-MiniLM-L6-v2",
+     use_auth_token=os.environ.get("HUGGINGFACE_HUB_TOKEN"),
+ )
+
+ def extract_text_from_pdf(pdf_path):
+     """Extract text from a PDF file."""
+     try:
+         reader = PdfReader(pdf_path)
+         text = ""
+         for page in reader.pages:
+             text += page.extract_text() + "\n"
+         return text.strip()
+     except Exception as e:
+         print(f"Error processing {pdf_path}: {str(e)}")
+         return ""
+
+ def create_index():
+     """Create and save the FAISS index and document metadata."""
+     # Create Knowledge_base directory if it doesn't exist
+     os.makedirs("Knowledge_base", exist_ok=True)
+
+     # Get all PDF files from the Knowledge_base directory
+     pdf_files = glob.glob("Knowledge_base/*.pdf")
+
+     if not pdf_files:
+         raise ValueError("No PDF files found in Knowledge_base directory!")
+
+     print(f"Found {len(pdf_files)} PDF files. Processing...")
+
+     # Process documents
+     documents = []
+     filenames = []
+
+     for pdf_path in pdf_files:
+         filename = os.path.basename(pdf_path)
+         content = extract_text_from_pdf(pdf_path)
+
+         if content:
+             # Split content into chunks (roughly 1000 characters each)
+             chunks = [content[i:i+1000] for i in range(0, len(content), 1000)]
+
+             for i, chunk in enumerate(chunks):
+                 if chunk.strip():
+                     documents.append(chunk)
+                     filenames.append(f"{filename} (chunk {i+1})")
+
+     if not documents:
+         raise ValueError("No valid content extracted from PDFs!")
+
+     print(f"Successfully processed {len(documents)} chunks from {len(pdf_files)} PDFs")
+
+     # Create embeddings
+     print("Creating embeddings...")
+     embeddings = embedding_model.encode(documents)
+
+     # Set up FAISS index
+     dimension = embeddings.shape[1]
+     index = faiss.IndexFlatIP(dimension)
+
+     # Normalize embeddings for cosine similarity
+     faiss.normalize_L2(embeddings)
+     index.add(embeddings)
+
+     # Save the index and metadata
+     print("Saving index and metadata...")
+     faiss.write_index(index, "Knowledge_base/faiss_index.bin")
+
+     metadata = {
+         "documents": documents,
+         "filenames": filenames
+     }
+
+     with open("Knowledge_base/metadata.pkl", "wb") as f:
+         pickle.dump(metadata, f)
+
+     print("Index and metadata saved successfully!")
+
+ if __name__ == "__main__":
+     create_index()
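After running this script, a minimal sanity check (a sketch assuming the default output paths above) is to reload the artifacts and confirm that the counts agree:

    import pickle
    import faiss

    # Reload what create_index() wrote and verify the chunk counts line up
    index = faiss.read_index("Knowledge_base/faiss_index.bin")
    with open("Knowledge_base/metadata.pkl", "rb") as f:
        metadata = pickle.load(f)
    assert index.ntotal == len(metadata["documents"]) == len(metadata["filenames"])
    print(f"{index.ntotal} chunks indexed")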
private/surge_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+     "api_key": "sk_live_b64524jqzpp4koyu2lzgzlhdaglxdppidj2azpwerqjnxkphpmn4r6wb",
+     "account_id": "acct_01jxrvt3v0f8t8qs1aewyx021m",
+     "my_phone_number": "6044467277",
+     "my_first_name": "Lei Lei",
+     "my_last_name": "Wu"
+ }
requirements.txt ADDED
@@ -0,0 +1,7 @@
+ flask==3.0.2
+ python-dotenv==1.0.1
+ httpx==0.27.0
+ together==0.2.5
+ faiss-cpu==1.7.4
+ sentence-transformers==2.5.1
+ PyPDF2==3.0.1
templates/index.html ADDED
@@ -0,0 +1,211 @@
+ <!DOCTYPE html>
+ <html lang="en">
+ <head>
+     <meta charset="UTF-8">
+     <meta name="viewport" content="width=device-width, initial-scale=1.0">
+     <title>Kidney Key Chat</title>
+     <link href="https://cdn.jsdelivr.net/npm/[email protected]/dist/css/bootstrap.min.css" rel="stylesheet">
+     <style>
+         body {
+             background: linear-gradient(135deg, #e0eafc 0%, #cfdef3 100%);
+             min-height: 100vh;
+         }
+         .app-container {
+             max-width: 700px;
+             margin: 40px auto;
+             background: #fff;
+             border-radius: 18px;
+             box-shadow: 0 8px 32px 0 rgba(31, 38, 135, 0.18);
+             padding: 32px 28px 24px 28px;
+         }
+         .chat-container {
+             height: 400px;
+             overflow-y: auto;
+             border: 1.5px solid #b6c6e0;
+             padding: 24px 18px;
+             margin-bottom: 24px;
+             border-radius: 12px;
+             background: #f8fbff;
+         }
+         .message {
+             margin-bottom: 18px;
+             padding: 14px 18px;
+             border-radius: 12px;
+             font-size: 1.08rem;
+             line-height: 1.5;
+             max-width: 80%;
+             word-break: break-word;
+             box-shadow: 0 2px 8px rgba(80, 120, 200, 0.07);
+         }
+         .user-message {
+             background: linear-gradient(90deg, #a1c4fd 0%, #c2e9fb 100%);
+             color: #1a237e;
+             margin-left: auto;
+             text-align: right;
+         }
+         .bot-message {
+             background: linear-gradient(90deg, #fbc2eb 0%, #a6c1ee 100%);
+             color: #4a148c;
+             margin-right: auto;
+             text-align: left;
+         }
+         .loading {
+             display: none;
+             text-align: center;
+             margin: 10px 0;
+         }
+         .sms-button {
+             margin-top: 10px;
+         }
+         .app-title {
+             font-family: 'Segoe UI', 'Roboto', Arial, sans-serif;
+             font-weight: 700;
+             font-size: 2.3rem;
+             color: #283593;
+             letter-spacing: 1px;
+         }
+         .app-desc {
+             font-size: 1.08rem;
+             color: #374151;
+             margin-bottom: 28px;
+             margin-top: 8px;
+             background: #e3f0ff;
+             border-radius: 8px;
+             padding: 12px 18px;
+         }
+         .input-group input {
+             font-size: 1.08rem;
+             border-radius: 8px 0 0 8px;
+         }
+         .input-group .btn-primary {
+             border-radius: 0 8px 8px 0;
+             font-weight: 600;
+             background: linear-gradient(90deg, #43cea2 0%, #185a9d 100%);
+             border: none;
+         }
+         .input-group .btn-primary:hover {
+             background: linear-gradient(90deg, #185a9d 0%, #43cea2 100%);
+         }
+         .btn-success.sms-button {
+             background: linear-gradient(90deg, #f7971e 0%, #ffd200 100%);
+             color: #333;
+             border: none;
+             font-weight: 600;
+         }
+         .btn-success.sms-button:hover {
+             background: linear-gradient(90deg, #ffd200 0%, #f7971e 100%);
+             color: #222;
+         }
+         @media (max-width: 600px) {
+             .app-container {
+                 padding: 10px 2px 18px 2px;
+             }
+             .chat-container {
+                 padding: 10px 4px;
+             }
+         }
+     </style>
+ </head>
+ <body>
+     <div class="app-container">
+         <h1 class="text-center app-title mb-2">Kidney Key Chat</h1>
+         <div class="app-desc text-center">
+             <strong>What is this?</strong> <br>
+             Kidney Key Chat is an AI-powered assistant designed to help healthcare professionals and patients with questions about renal dosing, dialysis, and medication adjustments for kidney function. <br><br>
+             <strong>How does it work?</strong> <br>
+             Just type your question below (e.g., "How do you dose amoxicillin in CKD?"). The app uses Retrieval-Augmented Generation (RAG) to search a curated knowledge base of clinical PDFs, finds the most relevant information, and generates a clear, referenced answer. You can also send the chatbot's response to your phone via SMS for easy reference.
+         </div>
+         <div class="chat-container" id="chatContainer">
+             <!-- Messages will be added here -->
+         </div>
+         <div class="loading" id="loading">
+             <div class="spinner-border text-primary" role="status">
+                 <span class="visually-hidden">Loading...</span>
+             </div>
+         </div>
+         <div class="input-group mb-3">
+             <input type="text" id="userInput" class="form-control" placeholder="Type your question here...">
+             <button class="btn btn-primary" onclick="sendMessage()">Send</button>
+         </div>
+         <div class="text-center">
+             <button class="btn btn-success sms-button" onclick="sendLastResponse()" id="smsButton" style="display: none;">
+                 Send Last Response via SMS
+             </button>
+         </div>
+     </div>
+     <script>
+         let lastBotResponse = '';
+         function addMessage(message, isUser) {
+             const chatContainer = document.getElementById('chatContainer');
+             const messageDiv = document.createElement('div');
+             messageDiv.className = `message ${isUser ? 'user-message' : 'bot-message'}`;
+             messageDiv.textContent = message;
+             chatContainer.appendChild(messageDiv);
+             chatContainer.scrollTop = chatContainer.scrollHeight;
+         }
+         function showLoading() {
+             document.getElementById('loading').style.display = 'block';
+         }
+         function hideLoading() {
+             document.getElementById('loading').style.display = 'none';
+         }
+         async function sendMessage() {
+             const userInput = document.getElementById('userInput');
+             const message = userInput.value.trim();
+             if (!message) return;
+             addMessage(message, true);
+             userInput.value = '';
+             showLoading();
+             try {
+                 const response = await fetch('/chat', {
+                     method: 'POST',
+                     headers: {
+                         'Content-Type': 'application/json',
+                     },
+                     body: JSON.stringify({ question: message }),
+                 });
+                 const data = await response.json();
+                 if (response.ok) {
+                     lastBotResponse = data.answer;
+                     addMessage(data.answer, false);
+                     document.getElementById('smsButton').style.display = 'inline-block';
+                 } else {
+                     addMessage('Error: ' + data.error, false);
+                 }
+             } catch (error) {
+                 addMessage('Error: Could not connect to the server', false);
+             } finally {
+                 hideLoading();
+             }
+         }
+         async function sendLastResponse() {
+             if (!lastBotResponse) return;
+             showLoading();
+             try {
+                 const response = await fetch('/send-sms', {
+                     method: 'POST',
+                     headers: {
+                         'Content-Type': 'application/json',
+                     },
+                     body: JSON.stringify({ message: lastBotResponse }),
+                 });
+                 const data = await response.json();
+                 if (response.ok) {
+                     alert('Message sent successfully!');
+                 } else {
+                     alert('Error: ' + data.error);
+                 }
+             } catch (error) {
+                 alert('Error: Could not send SMS');
+             } finally {
+                 hideLoading();
+             }
+         }
+         document.getElementById('userInput').addEventListener('keypress', function(e) {
+             if (e.key === 'Enter') {
+                 sendMessage();
+             }
+         });
+     </script>
+ </body>
+ </html>
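The SMS hand-off described in the page copy above goes through the app's /send-sms route; outside the browser it can be driven the same way the page's fetch() call does (a sketch assuming the server is running locally on port 5000 and private/surge_config.json is in place):

    import httpx

    # Forward an arbitrary message body to the /send-sms endpoint
    resp = httpx.post(
        "http://127.0.0.1:5000/send-sms",
        json={"message": "Test message from Kidney Key Chat"},
        timeout=30.0,
    )
    print(resp.json())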