csepartha committed
Commit f14b058 · verified · 1 Parent(s): 8f7f7d7

Upload 6 files

Files changed (3)
  1. Dockerfile +8 -7
  2. README.md +26 -0
  3. demo.launcher +7 -3
Dockerfile CHANGED
@@ -1,22 +1,23 @@
 FROM ubuntu:22.04
 
 # System deps
-:contentReference[oaicite:10]{index=10}
+RUN apt-get update && apt-get install -y curl sqlite3 python3 python3-pip
 
 # Install Ollama
-:contentReference[oaicite:11]{index=11}
+RUN curl -fsSL https://ollama.com/install.sh | sh
 
 # Copy python deps
-:contentReference[oaicite:12]{index=12}
-:contentReference[oaicite:13]{index=13}
+COPY requirements.txt .
+RUN pip install --no-cache-dir -r requirements.txt
 
 # Copy project
 COPY . .
 
-# Non-root user
-:contentReference[oaicite:14]{index=14}
+# Non-root user (recommended for Hugging Face Spaces)
+RUN useradd -ms /bin/bash user && chown -R user:user ./
 USER user
 
 EXPOSE 7860
-:contentReference[oaicite:15]{index=15}
+
+CMD ["./demo.launcher"]
 
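One detail worth double-checking, since the build context isn't shown in this commit: `CMD ["./demo.launcher"]` only works if the script carries the executable bit. `COPY` preserves permissions from the build context, so if demo.launcher wasn't committed as executable, a hypothetical line like the following, placed before `USER user`, would cover it:

```dockerfile
# Hypothetical addition: only needed if demo.launcher lacks the executable bit
RUN chmod +x ./demo.launcher
```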
README.md CHANGED
@@ -1,3 +1,29 @@
+---
+title: Ollama MCP Gradio Demo
+emoji: 🚀
+colorFrom: blue
+colorTo: indigo
 sdk: docker
 app_port: 7860
+pinned: false
+tags:
+- mcp-server-track
+---
+# Ollama MCP Gradio Demo
+
+This Space runs a SQLite MCP server and a Gradio client backed by Ollama, built for the Gradio Agents & MCP Hackathon 2025.
+
+- Uses Ollama model: `granite3.1-moe`
+- MCP server backed by a SQLite database
+- Gradio interface with chat and history
+
+## How it works
+
+1. The MCP server exposes tools for adding/reading people.
+2. The Ollama model answers via Gradio, calling MCP tools in the backend.
+3. Everything runs on a Hugging Face Space via Docker.
+
+## Usage
+
+Type your questions in the chatbox!
 
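The README's "How it works" section references tools for adding/reading people, but server.py itself is not part of this diff. As a rough illustration only, a minimal MCP server along those lines might look like the sketch below; it assumes the official `mcp` Python SDK and a local people.db file, neither of which is confirmed by the commit.

```python
# Hypothetical sketch of server.py; the real file is not shown in this commit.
import sqlite3

from mcp.server.fastmcp import FastMCP

mcp = FastMCP("people")
DB_PATH = "people.db"  # assumed database location


def get_conn() -> sqlite3.Connection:
    conn = sqlite3.connect(DB_PATH)
    conn.execute("CREATE TABLE IF NOT EXISTS people (name TEXT, age INTEGER)")
    return conn


@mcp.tool()
def add_person(name: str, age: int) -> str:
    """Insert a person into the SQLite database."""
    with get_conn() as conn:  # the connection context manager commits on success
        conn.execute("INSERT INTO people (name, age) VALUES (?, ?)", (name, age))
    return f"Added {name}"


@mcp.tool()
def list_people() -> list[str]:
    """Read all people from the SQLite database."""
    with get_conn() as conn:
        rows = conn.execute("SELECT name, age FROM people").fetchall()
    return [f"{name} ({age})" for name, age in rows]


if __name__ == "__main__":
    mcp.run()  # stdio transport by default
```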
demo.launcher CHANGED
@@ -1,8 +1,12 @@
 #!/bin/bash
-:contentReference[oaicite:16]{index=16}
+# Pull your Ollama model (can be moved to build if desired)
+ollama pull granite3.1-moe
 ollama serve &
 sleep 5
 
-# Launch your app
-:contentReference[oaicite:17]{index=17}
+# Start MCP server in the background
+python3 server.py &
+
+# Start Gradio (client.py) on 0.0.0.0:7860
+python3 client.py --server_name 0.0.0.0 --server_port 7860
 
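One caveat on the new script: `ollama pull` talks to the Ollama server's API, so running it before `ollama serve &` will typically fail inside the container. A reordered sketch, assuming the same model and scripts (not verified against the running Space):

```bash
#!/bin/bash
ollama serve &
sleep 5                       # give the server a moment to come up

# Pull the model once the server is reachable
ollama pull granite3.1-moe

# Start the MCP server in the background, then Gradio in the foreground
python3 server.py &
python3 client.py --server_name 0.0.0.0 --server_port 7860
```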