# Start Ollama server in background
echo "🚀 Starting Ollama server..."
ollama serve &
# Wait for Ollama to be ready on port 11434
echo "⏳ Waiting for Ollama to start..."
while ! nc -z localhost 11434; do
  sleep 1
done
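
# The loop above probes port 11434 with netcat until the server accepts
# connections. If nc is not installed in the image (an assumption about the
# environment), polling the HTTP API works the same way:
#   until curl -sf http://localhost:11434/ > /dev/null; do sleep 1; done
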
# Pull default model
echo "📥 Pulling model..."
ollama pull llama2
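
# The model is hard-coded here; a configurable variant (not part of the
# original script) could take the name from an environment variable instead:
#   ollama pull "${OLLAMA_MODEL:-llama2}"
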
# Start Cloudflared tunnel
echo "🌐 Exposing Ollama through Cloudflare tunnel..."
exec node ollama.js