csepartha commited on
Commit
8f7f7d7
·
verified ·
1 Parent(s): aa8abf8

Upload 6 files

Browse files
Files changed (6) hide show
  1. Dockerfile +22 -0
  2. README.md +1 -10
  3. client.py +172 -0
  4. demo.launcher +8 -0
  5. requirements.txt +7 -0
  6. server.py +79 -0
Dockerfile ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ FROM ubuntu:22.04
2
+
3
+ # System deps
4
+ RUN apt-get update && apt-get install -y --no-install-recommends python3 python3-pip curl ca-certificates && rm -rf /var/lib/apt/lists/*
5
+
6
+ # Install Ollama
7
+ RUN curl -fsSL https://ollama.com/install.sh | sh
8
+
9
+ # Copy python deps
10
+ COPY requirements.txt .
11
+ RUN pip3 install --no-cache-dir -r requirements.txt
12
+
13
+ # Copy project
14
+ COPY . .
15
+
16
+ # Non-root user
17
+ RUN useradd -m -u 1000 user
18
+ USER user
19
+
20
+ EXPOSE 7860
21
+ CMD ["bash", "demo.launcher"]
22
+
README.md CHANGED
@@ -1,12 +1,3 @@
1
- ---
2
- title: Ollama Mcp Gradio
3
- emoji: 📚
4
- colorFrom: indigo
5
- colorTo: yellow
6
  sdk: docker
7
- pinned: false
8
- license: mit
9
- short_description: Ollama MCP Gradio App
10
- ---
11
 
12
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
 
 
 
 
1
  sdk: docker
2
+ app_port: 7860
 
 
 
3
 
 
client.py ADDED
@@ -0,0 +1,172 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# client.py
#
# Gradio chat front-end that drives an MCP-backed SQLite agent running on a
# local Ollama model. Each exchange is also logged to demo.db.

import re
import time
import sqlite3
import asyncio
import gradio as gr
from gradio.queueing import Queue

# Patch Gradio's queue to avoid a NoneType error when Blocks has no queue
# configured. NOTE(review): these Queue kwargs match an older Gradio API —
# confirm against the gradio version pinned in requirements.txt.
if not hasattr(gr.Blocks, "_queue") or gr.Blocks._queue is None:
    gr.Blocks._queue = Queue(
        live_updates=False,
        concurrency_count=1,
        update_intervals=[],
        max_size=64,
        blocks=None,
    )

# Ollama LLM import (new package layout first, legacy fallback)
try:
    from llama_index.llms.ollama import Ollama
except ImportError:
    from llama_index.legacy.llms.ollama import Ollama

# MCP imports (new & legacy)
try:
    from llama_index.tools.mcp import BasicMCPClient, McpToolSpec
except ImportError:
    from llama_index.legacy.tools.mcp import BasicMCPClient, McpToolSpec

from llama_index.core.agent.workflow import FunctionAgent
from llama_index.core.workflow import Context

# One private event loop, reused by every (synchronous) Gradio handler.
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)

llm = Ollama(model="granite3.1-moe", request_timeout=300.0)

# BUGFIX: server.py serves FastMCP over streamable-http on 127.0.0.1:8000,
# whose default endpoint path is /mcp. The previous URL
# (http://127.0.0.1:11434/mcp/http) pointed at Ollama's own port and could
# never reach the tool server.
MCP_SERVER_URL = "http://127.0.0.1:8000/mcp"
mcp_client = BasicMCPClient(MCP_SERVER_URL)
mcp_spec = McpToolSpec(client=mcp_client)

async def init_agent():
    """Fetch the MCP tool list and build the FunctionAgent and its Context.

    Returns:
        (agent, context): the configured FunctionAgent and a fresh Context
        that carries conversation state across `agent.run` calls.
    """
    print("🔵 Fetching tools…")
    tools = await mcp_spec.to_tool_list_async()
    print(f"🔵 Loaded {len(tools)} tools.")
    agent = FunctionAgent(
        name="SQLiteAgent",
        description="Agent for SQLite people DB via MCP",
        tools=tools,
        llm=llm,
        system_prompt="You are an assistant. Use the tools to read/write the people database.",
    )
    print("🔵 Agent ready.")
    return agent, Context(agent)

# Initialize eagerly at import time so the UI only starts once tools exist.
agent, agent_context = loop.run_until_complete(init_agent())
print("✅ Agent & Context initialized.")
61
+
62
+
63
def clean_response(text: str) -> str:
    """Remove any <think>…</think> reasoning span (plus the whitespace that
    follows it) from *text*, then strip surrounding whitespace."""
    without_thoughts = re.sub(r"<think>.*?</think>\s*", "", text, flags=re.DOTALL)
    return without_thoughts.strip()
66
+
67
async def async_handle_message(msg: str) -> str:
    """Run one user message through the agent workflow.

    Streams tool-call events to stdout while the agent works, then returns
    the cleaned final answer (never an empty string).
    """
    print(f"\n🟢 USER: {msg}")
    handler = agent.run(msg, ctx=agent_context)

    # Surface each tool invocation as the workflow streams events.
    async for event in handler.stream_events():
        if hasattr(event, "tool_name"):
            print(f"🔧 ToolCall → {event.tool_name}")

    try:
        raw_text = str(await handler)
    except Exception as e:
        raw_text = f"⚠️ [ERROR] {e}"

    print(f"🟣 RAW RESPONSE: {repr(raw_text)}")
    cleaned = clean_response(raw_text)
    print(f"🟣 CLEANED RESPONSE: {repr(cleaned)}")
    return cleaned or "⚠️ (empty response)"
82
+
83
+
84
+ def handle_message(message, chat_history):
85
+ if not isinstance(chat_history, list) or any(not isinstance(m, dict) for m in chat_history):
86
+ chat_history = []
87
+
88
+ chat_history.append({"role": "user", "content": message})
89
+
90
+ start = time.time()
91
+ reply = loop.run_until_complete(async_handle_message(message))
92
+ end = time.time()
93
+
94
+ chat_history.append({"role": "assistant", "content": reply})
95
+
96
+ try:
97
+ db_conn = sqlite3.connect("demo.db")
98
+ db_cursor = db_conn.cursor()
99
+ db_cursor.execute("""
100
+ CREATE TABLE IF NOT EXISTS interactions (
101
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
102
+ prompt TEXT NOT NULL,
103
+ response TEXT NOT NULL,
104
+ time_taken_sec REAL NOT NULL,
105
+ timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
106
+ )
107
+ """)
108
+ db_cursor.execute(
109
+ "INSERT INTO interactions (prompt, response, time_taken_sec) VALUES (?, ?, ?)",
110
+ (message, reply, round(end - start, 3))
111
+ )
112
+ db_conn.commit()
113
+ db_conn.close()
114
+ print(f"[DB] Logged interaction in {round(end - start, 3)} sec")
115
+ except Exception as e:
116
+ print(f"[DB ERROR] {e}")
117
+
118
+ return chat_history, ""
119
+
120
+
121
def fetch_recent_interactions(limit=5):
    """Return up to *limit* most-recent interaction rows, newest first.

    Each row is (prompt, response, time_taken_sec, timestamp). On any DB
    error a single sentinel row describing the error is returned instead of
    raising, so the UI always has something to render.
    """
    conn = None
    try:
        conn = sqlite3.connect("demo.db")
        cursor = conn.cursor()
        cursor.execute(
            "SELECT prompt, response, time_taken_sec, timestamp FROM interactions ORDER BY id DESC LIMIT ?",
            (limit,),
        )
        return cursor.fetchall()
    except Exception as e:
        return [("Error fetching interactions", str(e), 0, "")]
    finally:
        # The original leaked the connection when execute() raised.
        if conn is not None:
            conn.close()
131
+
132
+
133
# Gradio UI. NOTE: component creation and event wiring below are
# order-sensitive — handlers reference components created above them.
with gr.Blocks(title="Gradio Agents & MCP Hackathon 2025") as demo:
    gr.Markdown("""
# 🧠 SQLite MCP Chatbot — <span style='color:#4A90E2;'>Gradio + Ollama + MCP</span>
### Designed by **Partha Pratim Ray** for the Gradio Agents & MCP Hackathon 2025 🚀
""", elem_id="header")

    with gr.Row():
        # Left column: the chat surface and its controls.
        with gr.Column():
            chatbot = gr.Chatbot(label="🗨️ Chat Window", type="messages", height=400)
            user_input = gr.Textbox(placeholder="Type your question…", show_label=False)
            submit_btn = gr.Button("Submit")
            clear_btn = gr.Button("Clear Chat")
        # Right column: a live view of the interaction log.
        with gr.Column():
            gr.Markdown("### 📜 Recent Interactions (Last 5)")
            output_display = gr.HTML()

    def update_recent_display():
        # Render the most recent logged interactions as an HTML card list;
        # responses are truncated to 300 characters for display.
        rows = fetch_recent_interactions()
        display = "<div style='font-family:monospace;'>"
        for prompt, response, sec, ts in rows:
            display += f"<div style='margin-bottom:12px; padding:10px; border-left: 4px solid #4A90E2;'>"
            display += f"<strong>🕒 {ts}</strong><br><strong>Prompt:</strong> {prompt}<br><strong>Response:</strong> {response[:300]}...<br><strong>⏱ Time:</strong> {sec} sec"
            display += "</div>"
        return display + "</div>"

    def on_submit(msg, chat):
        # Run the agent on the message, then refresh the recent panel so
        # the just-logged interaction appears immediately.
        new_chat, _ = handle_message(msg, chat)
        recent_html = update_recent_display()
        return new_chat, "", recent_html

    # Both the button and Enter-in-textbox trigger the same handler.
    submit_btn.click(on_submit, inputs=[user_input, chatbot], outputs=[chatbot, user_input, output_display])
    user_input.submit(on_submit, inputs=[user_input, chatbot], outputs=[chatbot, user_input, output_display])
    # Clearing resets the chat but still re-renders the (persistent) log.
    clear_btn.click(lambda: ([], "", update_recent_display()), None, [chatbot, user_input, output_display])

    # Load recent on startup
    demo.load(update_recent_display, None, output_display)

if __name__ == "__main__":
    demo.launch()
172
+
demo.launcher ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ #!/bin/bash
2
+ # Start the Ollama server in the background and give it time to come up
3
+ ollama serve &
4
+ sleep 5
5
+
6
+ # Launch your app
7
+ python3 client.py
8
+
requirements.txt ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ fastmcp>=2.0.0
2
+ gradio>=3.30
3
+ ollama>=0.5.0
4
+ llama-index-llms-ollama>=0.6.1
5
+ llama-index-tools-mcp>=0.2.3
6
+ llama-index>=0.12.39
7
+
server.py ADDED
@@ -0,0 +1,79 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# server.py
#
# FastMCP server exposing two raw-SQL tools (add_data, read_data) over a
# shared SQLite database used by the Gradio client.

import sqlite3
from fastmcp import FastMCP

# Initialize MCP server
mcp = FastMCP(
    name="SQLiteMCPServer",
    port=8000,
    transport="streamable-http",
    instructions="Tools: add_data(query) and read_data(query)."
)

# --- Database Setup ---
DB_PATH = "demo.db"
# check_same_thread=False lets tool handlers run on other threads.
# NOTE(review): the cursor is shared with no locking — confirm the server
# dispatches tool calls serially, otherwise concurrent access can corrupt
# statement state.
_conn = sqlite3.connect(DB_PATH, check_same_thread=False)
_cursor = _conn.cursor()

print("🔧 Connecting to SQLite DB:", DB_PATH)

# Create tables up front so the tools never see a missing schema.
print("🛠️ Ensuring table 'people' exists…")
_cursor.execute("""
CREATE TABLE IF NOT EXISTS people (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name TEXT NOT NULL,
    age INTEGER NOT NULL,
    profession TEXT NOT NULL
)
""")

print("🛠️ Ensuring table 'interactions' exists…")
_cursor.execute("""
CREATE TABLE IF NOT EXISTS interactions (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    prompt TEXT NOT NULL,
    response TEXT NOT NULL,
    time_taken_sec REAL NOT NULL,
    timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
)
""")

_conn.commit()
print("✅ Tables are ready.")
45
+
46
+
47
+ # --- Tool: Add Record ---
48
# --- Tool: Add Record ---
@mcp.tool(name="add_data", description="Insert a record via SQL")
def add_data(query: str) -> bool:
    """Execute a raw SQL write statement and commit; True on success.

    SECURITY: the query string is executed verbatim — that is this tool's
    purpose (the agent composes SQL), so the caller is fully trusted.

    Returns:
        True if execute+commit succeeded, False on any exception
        (the error is printed, never raised).
    """
    print(f"📥 [add_data] Executing query:\n{query}")
    try:
        _cursor.execute(query)
        _conn.commit()
        print("✅ Inserted successfully.")
        return True
    except Exception as e:
        print(f"❌ Insert error: {e}")
        return False
59
+
60
+
61
+ # --- Tool: Read Records ---
62
+ @mcp.tool(name="read_data", description="Query records via SQL")
63
+ def read_data(query: str = "SELECT * FROM people") -> list:
64
+ print(f"📤 [read_data] Executing query:\n{query}")
65
+ try:
66
+ _cursor.execute(query)
67
+ results = _cursor.fetchall()
68
+ print(f"✅ Retrieved {len(results)} rows.")
69
+ return results
70
+ except Exception as e:
71
+ print(f"❌ Read error: {e}")
72
+ return []
73
+
74
+
75
# --- Run the Server ---
if __name__ == "__main__":
    # Bind to loopback only; streamable-http serves the MCP endpoint here.
    # NOTE(review): confirm the client's MCP URL matches this host/port.
    print("🚀 Starting SQLite MCP server on http://127.0.0.1:8000 …")
    mcp.run(transport="streamable-http", host="127.0.0.1", port=8000)
79
+