Echo-ai committed on
Commit
1301813
·
verified ·
1 Parent(s): 09628c7

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +27 -10
app.py CHANGED
@@ -1,16 +1,33 @@
1
- from fastapi import FastAPI
2
- from langchain_community.llms import Ollama
3
 
4
  app = FastAPI()
5
 
6
- # Initialize the Ollama model
7
- llm = Ollama(model="tinyllama")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8
 
9
  @app.get("/")
10
  async def root():
11
- return {"message": "Ollama is running on Hugging Face Spaces!"}
12
-
13
- @app.get("/chat")
14
- async def chat(query: str):
15
- response = llm.invoke(query)
16
- return {"response": response}
 
1
+ from fastapi import FastAPI, HTTPException
2
+ import requests
3
 
4
  app = FastAPI()
5
 
6
+ # Ollama internal URL (running locally in the container)
7
+ OLLAMA_BASE_URL = "http://localhost:11434"
8
+
9
# Proxy endpoint forwarding GET requests to the local Ollama API.
#
# Declared as a plain `def` (not `async def`) on purpose: `requests` is a
# blocking HTTP client, and FastAPI executes sync endpoints in a worker
# threadpool, so the event loop is not stalled while waiting on Ollama.
@app.get("/api/{path:path}")
def ollama_proxy(path: str, query: str | None = None):
    """Forward a GET request to the local Ollama server.

    Args:
        path: Remainder of the URL after ``/api/``, passed through verbatim.
        query: Optional value forwarded as the ``query`` query-string param.

    Returns:
        The JSON body returned by Ollama.

    Raises:
        HTTPException: 500 with the underlying error text when the request
            to Ollama fails (connection error, timeout, or non-2xx status).
    """
    url = f"{OLLAMA_BASE_URL}/api/{path}"
    # Forward the param only when it was actually supplied; an explicit
    # empty string is still forwarded (truthiness would have dropped it).
    params = {"query": query} if query is not None else {}
    try:
        # Timeout keeps a hung Ollama from blocking a worker thread forever.
        response = requests.get(url, params=params, timeout=120)
        response.raise_for_status()
        return response.json()
    except requests.exceptions.RequestException as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
20
+
21
# Proxy endpoint forwarding POST requests (generate/chat/etc.) to Ollama.
#
# Plain `def` on purpose: `requests` blocks, and FastAPI runs sync endpoints
# in a threadpool so the event loop stays responsive.
@app.post("/api/{path:path}")
def ollama_proxy_post(path: str, body: dict):
    """Forward a JSON POST request to the local Ollama server.

    Args:
        path: Remainder of the URL after ``/api/``, passed through verbatim.
        body: JSON payload relayed unchanged to Ollama.

    Returns:
        The JSON body returned by Ollama.

    Raises:
        HTTPException: 500 with the underlying error text when the request
            to Ollama fails (connection error, timeout, or non-2xx status).

    NOTE(review): Ollama streams NDJSON by default on /api/generate and
    /api/chat; `.json()` will fail on a multi-line streamed response, so
    clients are expected to send ``"stream": false`` — confirm usage.
    """
    url = f"{OLLAMA_BASE_URL}/api/{path}"
    try:
        # Generous timeout: model generation can be slow, but a hung Ollama
        # must not pin a worker thread indefinitely.
        response = requests.post(url, json=body, timeout=300)
        response.raise_for_status()
        return response.json()
    except requests.exceptions.RequestException as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
30
 
31
# Simple landing endpoint confirming the service is up.
@app.get("/")
async def root():
    """Return a static greeting for the Space's root URL."""
    greeting = "Ollama running on Hugging Face Spaces! use the space url"
    return {"message": greeting}