Spaces: Runtime error
Update app.py
app.py CHANGED

@@ -18,6 +18,12 @@ import faiss
 from fastapi import FastAPI
 from ctransformers import AutoModelForCausalLM
 from src.core.cognitive_engine import CognitiveEngine
+from pydantic import BaseModel
+from memory import add_to_memory, get_memory
+
+class GenerateRequest(BaseModel):
+    prompt: str
+    session_id: str
 
 # Load LLM model
 llm_model = AutoModelForCausalLM.from_pretrained(
@@ -44,15 +50,20 @@ def home():
 def status():
     return {"status": "active", "agents": ["planner", "executor", "critic"]}
 
-@app.
-def
-    response = llm_model(prompt)
-    add_to_memory(prompt, response)
-    return {"response": response}
+@app.post("/generate")
+def generate_post(data: GenerateRequest):
+    response = llm_model(data.prompt)
+    add_to_memory(data.session_id, data.prompt, response)
+    return {"session_id": data.session_id, "response": response}
 
 @app.get("/memory")
 def view_memory():
     return {"history": get_memory()}
+
+@app.get("/memory/{session_id}")
+def session_memory(session_id: str):
+    return {"session_id": session_id, "history": get_memory(session_id)}
+
 
 # Test the model at startup (optional)
 if __name__ == "__main__":
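
The new code imports add_to_memory and get_memory from a local memory module that is not shown in this diff. The sketch below is only a guess at what that module could look like, assuming a plain in-process dict keyed by session_id; the real implementation in the Space may instead persist entries or index them with FAISS, which app.py already imports.

# memory.py: illustrative sketch only; the actual module in this Space may differ.
from collections import defaultdict

# In-process store mapping session_id -> list of {"prompt": ..., "response": ...} entries.
_memory = defaultdict(list)

def add_to_memory(session_id, prompt, response):
    """Append one prompt/response pair to the given session's history."""
    _memory[session_id].append({"prompt": prompt, "response": response})

def get_memory(session_id=None):
    """Return one session's history, or every session's history when no id is given."""
    if session_id is None:
        return {sid: list(entries) for sid, entries in _memory.items()}
    return list(_memory.get(session_id, []))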
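After this change, /generate expects a JSON body matching GenerateRequest rather than a bare prompt, and per-session history can be read back from /memory/{session_id}. A quick way to exercise the new endpoints, assuming the app is reachable at http://localhost:7860 (the base URL is a placeholder; substitute the Space's actual URL) and the requests package is installed:

# Example client calls; the base URL below is an assumption, adjust as needed.
import requests

BASE = "http://localhost:7860"  # placeholder; replace with the Space's real URL

# POST /generate with a body that matches the GenerateRequest model.
payload = {"prompt": "Summarize the project goals.", "session_id": "demo-1"}
print(requests.post(f"{BASE}/generate", json=payload).json())
# -> {"session_id": "demo-1", "response": "..."}

# GET /memory returns all stored history; /memory/{session_id} narrows it to one session.
print(requests.get(f"{BASE}/memory").json())
print(requests.get(f"{BASE}/memory/demo-1").json())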