PyxiLabs committed
Commit 323f9a7 · verified · 1 Parent(s): 7d96ccd

Update ollama.sh

Files changed (1):
  1. ollama.sh (+9 -8)
ollama.sh CHANGED
@@ -4,15 +4,16 @@
 echo "🚀 Starting Ollama server..."
 ollama serve &
 
-# Wait for Ollama to be ready
+# Wait for Ollama to be ready on port 11434
 echo "⏳ Waiting for Ollama to start..."
-sleep 5
+while ! nc -z localhost 11434; do
+  sleep 1
+done
 
-# Optional: Pull default model
-echo "📥 Pulling default model..."
-ollama pull all-minilm
+# Pull default model
+echo "📥 Pulling model..."
+ollama pull llama2
 
-# Start FastAPI reverse proxy
-echo "🔀 Starting FastAPI reverse proxy on port 7860..."
-#exec python -m uvicorn ollama:app --host 0.0.0.0 --port 7860 --proxy-headers
+# Start Cloudflared tunnel
+echo "🔗 Exposing Ollama through Cloudflare tunnel..."
 exec node ollama.js
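
For reference, a sketch of the full ollama.sh as it stands after this commit. Only lines 4–19 appear in the hunk, so the shebang and anything on lines 1–3 are assumptions; the remainder is taken verbatim from the new side of the diff:

#!/bin/bash
# NOTE: lines 1-3 are outside the diff hunk; this shebang/header is an assumption.

echo "🚀 Starting Ollama server..."
ollama serve &

# Wait for Ollama to be ready on port 11434
echo "⏳ Waiting for Ollama to start..."
while ! nc -z localhost 11434; do
  sleep 1
done

# Pull default model
echo "📥 Pulling model..."
ollama pull llama2

# Start Cloudflared tunnel
echo "🔗 Exposing Ollama through Cloudflare tunnel..."

exec node ollama.js

The nc -z poll assumes a netcat binary is available in the image; an equivalent readiness check could instead hit Ollama's HTTP endpoint (for example, retrying curl -sf http://localhost:11434/ until it responds).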