from flask import Flask, render_template, request, redirect, url_for, send_from_directory, flash
from flask_socketio import SocketIO
import threading
import os
from dotenv import load_dotenv
import sqlite3
from werkzeug.utils import secure_filename

# LangChain and agent imports
from langchain_core.tools import tool

# Agent requirements and type hints
from typing import Annotated, Literal, Any
from langchain_core.messages import AIMessage, ToolMessage
from pydantic import BaseModel, Field
from typing_extensions import TypedDict
from langgraph.graph import END, StateGraph, START
from langgraph.graph.message import AnyMessage, add_messages
from langchain_core.runnables import RunnableLambda, RunnableWithFallbacks
from langgraph.prebuilt import ToolNode

# Load environment variables
load_dotenv()

# Global configuration variables
UPLOAD_FOLDER = os.path.join(os.getcwd(), "uploads")
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
DATABASE_URI = f"sqlite:///{os.path.join(BASE_DIR, 'data', 'mydb.db')}"
print("DATABASE URI:", DATABASE_URI)

# API keys from the .env file (only exported if present, so a missing key fails later
# with a clear provider error instead of a TypeError here).
GROQ_API_KEY = os.getenv("GROQ_API_KEY")
MISTRAL_API_KEY = os.getenv("MISTRAL_API_KEY")
if GROQ_API_KEY:
    os.environ["GROQ_API_KEY"] = GROQ_API_KEY
if MISTRAL_API_KEY:
    os.environ["MISTRAL_API_KEY"] = MISTRAL_API_KEY

# Global variables for dynamic agent and DB file path; initially None.
agent_app = None
abs_file_path = None
db_path = None

# =============================================================================
# create_agent_app: Given a database path, initialize the agent workflow.
# =============================================================================
def create_agent_app(db_path: str):
    # Use ChatGroq as our LLM here; you can swap to ChatMistralAI if preferred.
    from langchain_groq import ChatGroq
    llm = ChatGroq(model="llama3-70b-8192")

    # -------------------------------------------------------------------------
    # Define a tool for executing SQL queries.
    # -------------------------------------------------------------------------
    @tool
    def db_query_tool(query: str) -> str:
        """Execute a SQL query against the database and return the result or an error message."""
        # db_instance is created later in this function; the closure resolves it at call time.
        result = db_instance.run_no_throw(query)
        return result if result else "Error: Query failed. Please rewrite your query and try again."

    # -------------------------------------------------------------------------
    # Pydantic model for final answer
    # -------------------------------------------------------------------------
    class SubmitFinalAnswer(BaseModel):
        final_answer: str = Field(..., description="The final answer to the user")

    # -------------------------------------------------------------------------
    # Define state type for our workflow.
    # -------------------------------------------------------------------------
    class State(TypedDict):
        messages: Annotated[list[AnyMessage], add_messages]
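        # add_messages appends rather than overwrites, so each node sees the full message history.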

    # -------------------------------------------------------------------------
    # Set up prompt templates (using langchain_core.prompts) for query checking
    # and query generation.
    # -------------------------------------------------------------------------
    from langchain_core.prompts import ChatPromptTemplate

    query_check_system = (
        "You are a SQL expert with a strong attention to detail.\n"
        "Double check the SQLite query for common mistakes, including:\n"
        "- Using NOT IN with NULL values\n"
        "- Using UNION when UNION ALL should have been used\n"
        "- Using BETWEEN for exclusive ranges\n"
        "- Data type mismatch in predicates\n"
        "- Properly quoting identifiers\n"
        "- Using the correct number of arguments for functions\n"
        "- Casting to the correct data type\n"
        "- Using the proper columns for joins\n\n"
        "If there are any of the above mistakes, rewrite the query. If there are no mistakes, just reproduce the original query.\n"
        "You will call the appropriate tool to execute the query after running this check."
    )
    query_check_prompt = ChatPromptTemplate.from_messages([
        ("system", query_check_system),
        ("placeholder", "{messages}")
    ])
    query_check = query_check_prompt | llm.bind_tools([db_query_tool])
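    # Note: query_check is bound to db_query_tool, so the checker model re-emits the
    # (possibly corrected) query as a tool call; the "correct_query" node below feeds
    # its output straight into "execute_query".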

    query_gen_system = (
        "You are a SQL expert with a strong attention to detail.\n\n"
        "Given an input question, output a syntactically correct SQLite query to run, then look at the results of the query and return the answer.\n\n"
        "DO NOT call any tool besides SubmitFinalAnswer to submit the final answer.\n\n"
        "When generating the query:\n"
        "Output the SQL query that answers the input question without a tool call.\n"
        "Unless the user specifies a specific number of examples they wish to obtain, always limit your query to at most 5 results.\n"
        "You can order the results by a relevant column to return the most interesting examples in the database.\n"
        "Never query for all the columns from a specific table, only ask for the relevant columns given the question.\n\n"
        "If you get an error while executing a query, rewrite the query and try again.\n"
        "If you get an empty result set, you should try to rewrite the query to get a non-empty result set.\n"
        "NEVER make stuff up if you don't have enough information to answer the query... just say you don't have enough information.\n\n"
        "If you have enough information to answer the input question, simply invoke the appropriate tool to submit the final answer to the user.\n"
        "DO NOT make any DML statements (INSERT, UPDATE, DELETE, DROP etc.) to the database. Do not return any sql query except answer."
    )
    query_gen_prompt = ChatPromptTemplate.from_messages([
        ("system", query_gen_system),
        ("placeholder", "{messages}")
    ])
    query_gen = query_gen_prompt | llm.bind_tools([SubmitFinalAnswer])
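    # Note: query_gen can only call SubmitFinalAnswer; any raw SQL it produces is plain
    # text that must pass through the "correct_query" and "execute_query" nodes below.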

    # Update the module-level DATABASE_URI and build the SQLite connection string.
    abs_db_path_local = os.path.abspath(db_path)
    global DATABASE_URI
    db_uri = f"sqlite:///{abs_db_path_local}"
    DATABASE_URI = db_uri
    print("db_uri:", db_uri)

    # Create a SQLDatabase connection using the LangChain utility.
    from langchain_community.utilities import SQLDatabase
    db_instance = SQLDatabase.from_uri(db_uri)
    print("db_instance:", db_instance)

    # Create SQL toolkit.
    from langchain_community.agent_toolkits import SQLDatabaseToolkit
    toolkit_instance = SQLDatabaseToolkit(db=db_instance, llm=llm)
    tools_instance = toolkit_instance.get_tools()

    # Define workflow nodes and fallback functions.
    def first_tool_call(state: State) -> dict[str, list[AIMessage]]:
        return {"messages": [AIMessage(content="", tool_calls=[{"name": "sql_db_list_tables", "args": {}, "id": "tool_abcd123"}])]}

    def handle_tool_error(state: State) -> dict:
        error = state.get("error")
        tool_calls = state["messages"][-1].tool_calls
        return {"messages": [
            ToolMessage(content=f"Error: {repr(error)}. Please fix your mistakes.", tool_call_id=tc["id"])
            for tc in tool_calls
        ]}

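    # Wrap a ToolNode so that tool exceptions are converted into ToolMessages by
    # handle_tool_error instead of aborting the graph run.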
    def create_tool_node_with_fallback(tools_list: list) -> RunnableWithFallbacks[Any, dict]:
        return ToolNode(tools_list).with_fallbacks([RunnableLambda(handle_tool_error)], exception_key="error")

    def query_gen_node(state: State):
        message = query_gen.invoke(state)
        tool_messages = []
        if message.tool_calls:
            for tc in message.tool_calls:
                if tc["name"] != "SubmitFinalAnswer":
                    tool_messages.append(ToolMessage(
                        content=f"Error: The wrong tool was called: {tc['name']}. Please fix your mistakes.",
                        tool_call_id=tc["id"]
                    ))
        return {"messages": [message] + tool_messages}

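    # Routing: a tool call on the last message means SubmitFinalAnswer was invoked (finish),
    # an "Error:" message sends the flow back to query generation, and anything else goes
    # to the query checker.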
    def should_continue(state: State) -> Literal[END, "correct_query", "query_gen"]:
        messages = state["messages"]
        last_message = messages[-1]
        if getattr(last_message, "tool_calls", None):
            return END
        if last_message.content.startswith("Error:"):
            return "query_gen"
        return "correct_query"

    def model_check_query(state: State) -> dict[str, list[AIMessage]]:
        return {"messages": [query_check.invoke({"messages": [state["messages"][-1]]})]}

    # Get table listing and schema tools.
    list_tables_tool = next((tool for tool in tools_instance if tool.name == "sql_db_list_tables"), None)
    get_schema_tool = next((tool for tool in tools_instance if tool.name == "sql_db_schema"), None)

    workflow = StateGraph(State)
    workflow.add_node("first_tool_call", first_tool_call)
    workflow.add_node("list_tables_tool", create_tool_node_with_fallback([list_tables_tool]))
    workflow.add_node("get_schema_tool", create_tool_node_with_fallback([get_schema_tool]))
    model_get_schema = llm.bind_tools([get_schema_tool])
    workflow.add_node("model_get_schema", lambda state: {"messages": [model_get_schema.invoke(state["messages"])],})
    workflow.add_node("query_gen", query_gen_node)
    workflow.add_node("correct_query", model_check_query)
    workflow.add_node("execute_query", create_tool_node_with_fallback([db_query_tool]))

    workflow.add_edge(START, "first_tool_call")
    workflow.add_edge("first_tool_call", "list_tables_tool")
    workflow.add_edge("list_tables_tool", "model_get_schema")
    workflow.add_edge("model_get_schema", "get_schema_tool")
    workflow.add_edge("get_schema_tool", "query_gen")
    workflow.add_conditional_edges("query_gen", should_continue)
    workflow.add_edge("correct_query", "execute_query")
    workflow.add_edge("execute_query", "query_gen")

    # Return compiled workflow
    return workflow.compile()

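# Example (sketch; assumes an existing SQLite file at the given path):
#   graph = create_agent_app("uploads/uploaded.db")
#   out = graph.invoke({"messages": [("user", "How many tables does the database have?")]})
#   print(out["messages"][-1].tool_calls[0]["args"]["final_answer"])
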
# =============================================================================
# create_app: The application factory.
# =============================================================================
def create_app():
    # Configure static folder for uploads.
    flask_app = Flask(__name__, static_url_path='/uploads', static_folder='uploads')
    # flash() needs a session secret; FLASK_SECRET_KEY is an assumed env var name with a dev fallback.
    flask_app.secret_key = os.getenv("FLASK_SECRET_KEY", "dev-secret-key")
    socketio = SocketIO(flask_app, cors_allowed_origins="*")

    # Ensure uploads folder exists.
    if not os.path.exists(UPLOAD_FOLDER):
        os.makedirs(UPLOAD_FOLDER)
    flask_app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER

    # Serve uploaded files via a custom route.
    @flask_app.route("/files/<path:filename>")
    def uploaded_file(filename):
        return send_from_directory(flask_app.config['UPLOAD_FOLDER'], filename)

    # -------------------------------------------------------------------------
    # Helper: run_agent runs the agent with the given prompt.
    # -------------------------------------------------------------------------
    def run_agent(prompt, socketio):
        global agent_app
        # db_path is only set once a database file has been uploaded.
        if db_path is None:
            socketio.emit("log", {"message": "[ERROR]: No database has been uploaded. Upload a database file first."})
            socketio.emit("final", {"message": "No database available. Upload one and try again."})
            return
        try:
            # Rebuild the agent against the most recently uploaded database.
            abs_file_path = os.path.abspath(db_path)
            agent_app = create_agent_app(abs_file_path)

            query = {"messages": [("user", prompt)]}
            result = agent_app.invoke(query)
            try:
                result = result["messages"][-1].tool_calls[0]["args"]["final_answer"]
            except Exception:
                result = "Query failed or no valid answer found."
            print("final_answer------>", result)
            socketio.emit("final", {"message": result})
        except Exception as e:
            print(f"[ERROR]: {str(e)}")
            socketio.emit("log", {"message": f"[ERROR]: {str(e)}"})
            socketio.emit("final", {"message": "Generation failed."})

    # -------------------------------------------------------------------------
    # Route: index page
    # -------------------------------------------------------------------------
    @flask_app.route("/")
    def index():
        return render_template("index.html")

    # -------------------------------------------------------------------------
    # Route: generate (POST) – receives a prompt, runs the agent.
    # -------------------------------------------------------------------------
    @flask_app.route("/generate", methods=["POST"])
    def generate():
        try:
            socketio.emit("log", {"message": "[STEP]: Entering query_gen..."})
            data = request.json
            prompt = data.get("prompt", "")
            socketio.emit("log", {"message": f"[INFO]: Received prompt: {prompt}"})
            thread = threading.Thread(target=run_agent, args=(prompt, socketio))
            socketio.emit("log", {"message": f"[INFO]: Starting thread: {thread}"})
            thread.start()
            return "OK", 200
        except Exception as e:
            print(f"[ERROR]: {str(e)}")
            flash(f"[ERROR]: {str(e)}", "error")
            socketio.emit("log", {"message": f"[ERROR]: {str(e)}"})
            return "ERROR", 500

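    # Example request (sketch; host/port depend on how the app is run):
    #   curl -X POST http://localhost:5000/generate \
    #        -H "Content-Type: application/json" \
    #        -d '{"prompt": "List the five most recent orders"}'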
    # -------------------------------------------------------------------------
    # Route: upload (GET/POST) – handles uploading the SQLite DB file.
    # -------------------------------------------------------------------------
    @flask_app.route("/upload", methods=["GET", "POST"])
    def upload():
        global abs_file_path, agent_app, db_path
        try:
            if request.method == "POST":
                file = request.files.get("file")
                if not file:
                    print("No file uploaded")
                    return "No file uploaded", 400
                # Secure the filename to avoid path traversal issues.
                filename = secure_filename(file.filename)
                if filename.endswith('.db'):
                    db_path = os.path.join(flask_app.config['UPLOAD_FOLDER'], "uploaded.db")
                    print("Saving file to:", db_path)
                    file.save(db_path)
                    #abs_file_path = os.path.abspath(db_path)
                    #agent_app = create_agent_app(abs_file_path)
                    print(f"[INFO]: Database file '{filename}' uploaded and loaded.")
                    socketio.emit("log", {"message": f"[INFO]: Database file '{filename}' uploaded and loaded."})
                    return redirect(url_for("index"))
                # Reject anything that is not a .db file instead of failing silently.
                flash("Please upload a SQLite database file with a .db extension.", "error")
            return render_template("upload.html")
        except Exception as e:
            print(f"[ERROR]: {str(e)}")
            flash(f"[ERROR]: {str(e)}", "error")
            socketio.emit("log", {"message": f"[ERROR]: {str(e)}"})
            return render_template("upload.html")

    return flask_app, socketio

# =============================================================================
# Create the app for Gunicorn compatibility.
# =============================================================================
app, socketio_instance = create_app()

if __name__ == "__main__":
    socketio_instance.run(app, debug=True)