Leonydis137 committed on
Commit a8aa7a0 · verified · 1 Parent(s): 7ab042d

Upload 25 files

Files changed (7)
  1. .github/workflows/deploy.yml +16 -0
  2. README.md +32 -34
  3. agent.py +12 -23
  4. api.py +16 -0
  5. auth.py +7 -12
  6. memory.py +38 -0
  7. session.py +26 -0
.github/workflows/deploy.yml ADDED
@@ -0,0 +1,16 @@
+
+ name: Deploy Autonomous AI
+
+ on:
+   push:
+     branches: [ main ]
+
+ jobs:
+   build-and-deploy:
+     runs-on: ubuntu-latest
+     steps:
+       - uses: actions/checkout@v3
+       - name: Build Docker image
+         run: docker build -t autonomous-ai .
+       - name: Run container
+         run: docker run -d -p 8000:8000 autonomous-ai
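
The workflow starts the container detached, so nothing in the job verifies it actually came up. A minimal smoke test that could run as a follow-up step (a sketch; the `/portal` path comes from the README below, and the retry counts are arbitrary assumptions):

```python
# smoke_test.py - poll the container started by deploy.yml until it answers.
import time
import urllib.request

def wait_for_service(url="http://localhost:8000/portal", attempts=10):
    for _ in range(attempts):
        try:
            with urllib.request.urlopen(url, timeout=2) as resp:
                return resp.status == 200
        except OSError:
            time.sleep(1)  # container may still be booting
    return False

if __name__ == "__main__":
    raise SystemExit(0 if wait_for_service() else 1)  # nonzero exit fails the CI step
```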
README.md CHANGED
@@ -1,34 +1,32 @@
- ---
- title: Autonomous AI
- emoji: 🤖
- colorFrom: gray
- colorTo: green
- sdk: gradio
- sdk_version: 5.34.2
- app_file: app.py
- pinned: false
- license: mit
- ---
- # 🤖 Autonomous AI — Fully Self-Updating Python Agent
-
- This is a powerful, self-improving autonomous agent capable of:
-
- - Planning tasks
- - Writing and executing Python code
- - Debugging itself
- - Storing memory and logs
- - Growing over time
-
- ## Files
-
- - `app.py`: Gradio UI
- - `agent.py`: Core self-runner
- - `utils.py`: Task planning, logging, memory
- - `memory.txt`: Long-term task memory
- - `logs/`: JSON logs of each run
-
- ## Usage
-
- 1. Upload to [Hugging Face Spaces](https://huggingface.co/spaces)
- 2. Set type to `Gradio`
- 3. Enjoy your AI developer assistant
+
+ # Autonomous AI — Self-Improving Agent
+
+ This is an autonomous AI system capable of learning, improving, and handling tasks via an API, webhooks, and a client portal.
+
+ ## 🚀 Features
+ - Self-upgrading agent loop
+ - Multi-agent collaboration
+ - Feedback storage + analytics dashboard
+ - Webhook + portal UI
+ - Scheduler & voice interface
+
+ ## 🐳 Deploy with Docker
+
+ ```bash
+ docker build -t autonomous-ai .
+ docker run -p 8000:8000 autonomous-ai
+ ```
+
+ Visit: http://localhost:8000/portal
+
+ ## 🌐 Manual Start (No Docker)
+ ```bash
+ pip install -r requirements.txt
+ python -c "from storage import init_db; init_db()"
+ uvicorn api:app --reload
+ ```
+
+ ## 🛠 Endpoints
+ - `/portal` - Task submission UI
+ - `/dashboard` - Feedback/goal display
+ - `/webhook` - POST task externally
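
The new README lists `/webhook` for external task submission but doesn't show a request. A minimal sketch (the JSON field name `goal` is an assumption; the webhook handler itself is not part of this commit):

```python
# webhook_demo.py - POST a task to the running service.
import json
import urllib.request

req = urllib.request.Request(
    "http://localhost:8000/webhook",
    data=json.dumps({"goal": "summarize this week's feedback"}).encode(),
    headers={"Content-Type": "application/json"},
    method="POST",
)
with urllib.request.urlopen(req) as resp:
    print(resp.read().decode())
```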
 
 
agent.py CHANGED
@@ -1,24 +1,13 @@
 
- import os
- import traceback
- from datetime import datetime
- from utils import save_log, read_memory, write_memory, plan_task, generate_code, run_code
-
- MEMORY_FILE = "memory.txt"
- LOG_DIR = "logs"
-
- def autonomous_agent(task):
-     try:
-         plan = plan_task(task)
-         code = generate_code(task)
-         result = run_code(code)
-
-         # Save memory and logs
-         write_memory(f"{datetime.now()}: Completed task '{task}'")
-         save_log(task, plan, code, result)
-
-         return f"✅ Task Complete:\nPlan: {plan}\n\nCode:\n{code}\n\nResult:\n{result}"
-     except Exception as e:
-         error = traceback.format_exc()
-         save_log(task, "FAILED", "None", error)
-         return f"❌ Task Failed:\n{error}"
+ from memory import MemoryVectorStore
+
+ memory = MemoryVectorStore()
+
+ def run_agent(goal):
+     memory.add(goal)
+     ideas = memory.search(goal)
+     # Assume the goal is passed into the LLM with this memory summary
+     summary = " ".join(ideas)
+     result = f"[Task]: {goal}\n[Memory]: {summary}\n[Response]: working on it..."
+     memory.add(result)
+     return result
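
For reference, the rewritten loop can be exercised directly; a second call should surface the first goal from the vector store (a sketch; assumes the `memory.py` module added below and its dependencies are installed):

```python
# agent_demo.py - earlier goals come back as the [Memory] summary.
from agent import run_agent

print(run_agent("collect feedback from the portal"))
print(run_agent("summarize the collected feedback"))  # retrieves the first goal
```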
api.py CHANGED
@@ -53,3 +53,19 @@ async def submit_task(goal: str = Form(...)):
  async def dashboard():
      with open("dashboard.html") as f:
          return f.read()
+
+
+ from fastapi import Header, HTTPException
+ from auth import authorize
+ from session import log_interaction, create_session
+
+ @app.post("/session_task")
+ def session_task(goal: str, x_api_key: str = Header(...), session_id: str = ""):
+     user = authorize(x_api_key)
+     if not user:
+         raise HTTPException(status_code=401, detail="Invalid API key")
+     if not session_id:
+         session_id = create_session(user)
+     output = run_agent(goal)
+     log_interaction(session_id, goal, output)
+     return {"session_id": session_id, "output": output}
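
Calling the new endpoint end to end (a sketch: `user-key` comes from `AUTHORIZED_KEYS` in `auth.py` below, `goal` and `session_id` travel in the query string per the signature above, and `run_agent` is assumed to be imported earlier in `api.py`, outside this hunk):

```python
# session_task_demo.py - call /session_task twice within one session.
import json
import urllib.parse
import urllib.request

def session_task(goal, session_id=""):
    qs = urllib.parse.urlencode({"goal": goal, "session_id": session_id})
    req = urllib.request.Request(
        f"http://localhost:8000/session_task?{qs}",
        headers={"x-api-key": "user-key"},  # defined in auth.py
        method="POST",
    )
    with urllib.request.urlopen(req) as resp:
        return json.loads(resp.read())

first = session_task("draft a status report")
second = session_task("refine the report", first["session_id"])  # same session
print(second["output"])
```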
auth.py CHANGED
@@ -1,14 +1,9 @@
 
- from fastapi import Request, HTTPException
- from starlette.middleware.base import BaseHTTPMiddleware
-
- API_KEY = "super-secret-key"  # TODO: Secure this
-
- class APIKeyMiddleware(BaseHTTPMiddleware):
-     async def dispatch(self, request: Request, call_next):
-         if request.url.path.startswith("/"):
-             api_key = request.headers.get("x-api-key")
-             if api_key != API_KEY:
-                 raise HTTPException(status_code=401, detail="Invalid API Key")
-         response = await call_next(request)
-         return response
+ # Very simple API key system
+ AUTHORIZED_KEYS = {
+     "admin-key": "admin",
+     "user-key": "user"
+ }
+
+ def authorize(key: str):
+     return AUTHORIZED_KEYS.get(key)
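
`authorize` is a plain dictionary lookup, so an unknown key returns `None`, which `/session_task` maps to a 401:

```python
# auth_demo.py - the whole contract of authorize() in two asserts.
from auth import authorize

assert authorize("admin-key") == "admin"
assert authorize("wrong-key") is None  # /session_task raises 401 for this
```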
memory.py ADDED
@@ -0,0 +1,38 @@
+
+ import os
+ import faiss
+ import pickle
+ from sentence_transformers import SentenceTransformer
+ from logger import logger
+
+ class MemoryVectorStore:
+     def __init__(self, path="memory_index"):
+         self.path = path
+         self.model = SentenceTransformer("all-MiniLM-L6-v2")
+         self.dim = 384  # embedding size of all-MiniLM-L6-v2
+         self.index = faiss.IndexFlatL2(self.dim)
+         self.memory = []
+         self.load()
+
+     def load(self):
+         if os.path.exists(self.path + ".pkl"):
+             with open(self.path + ".pkl", "rb") as f:
+                 data = pickle.load(f)
+                 self.memory = data["memory"]
+                 self.index = data["index"]
+             logger.info("✅ Memory index loaded")
+
+     def save(self):
+         with open(self.path + ".pkl", "wb") as f:
+             pickle.dump({"memory": self.memory, "index": self.index}, f)
+
+     def add(self, text):
+         vec = self.model.encode([text])
+         self.index.add(vec)
+         self.memory.append(text)
+         self.save()
+
+     def search(self, query, k=3):
+         vec = self.model.encode([query])
+         D, I = self.index.search(vec, k)  # faiss pads missing hits with index -1
+         return [self.memory[i] for i in I[0] if 0 <= i < len(self.memory)]
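
A quick round-trip through the store (a sketch; requires `faiss`, `sentence-transformers`, and the project's `logger` module, and writes a `demo_index.pkl` next to the script):

```python
# memory_demo.py - add a few memories, then retrieve the nearest one.
from memory import MemoryVectorStore

store = MemoryVectorStore(path="demo_index")
store.add("deployed the container to staging")
store.add("a user asked for a feedback dashboard")
print(store.search("what did users request?", k=1))
```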
session.py ADDED
@@ -0,0 +1,26 @@
+
+ import sqlite3
+ from uuid import uuid4
+
+ def init_session_db():
+     conn = sqlite3.connect("sessions.db")
+     conn.execute("CREATE TABLE IF NOT EXISTS sessions (id TEXT PRIMARY KEY, user TEXT)")
+     conn.execute("CREATE TABLE IF NOT EXISTS messages (session_id TEXT, user_input TEXT, ai_output TEXT)")
+     conn.commit()
+     conn.close()
+
+ def create_session(user):
+     sid = str(uuid4())
+     with sqlite3.connect("sessions.db") as conn:
+         conn.execute("INSERT INTO sessions (id, user) VALUES (?, ?)", (sid, user))
+     return sid
+
+ def log_interaction(session_id, user_input, ai_output):
+     with sqlite3.connect("sessions.db") as conn:
+         conn.execute("INSERT INTO messages (session_id, user_input, ai_output) VALUES (?, ?, ?)",
+                      (session_id, user_input, ai_output))
+
+ def get_session_logs(session_id):
+     with sqlite3.connect("sessions.db") as conn:
+         rows = conn.execute("SELECT user_input, ai_output FROM messages WHERE session_id=?", (session_id,)).fetchall()
+     return rows
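
End-to-end use of the session helpers (a sketch; note that `init_session_db()` has to run once before the first insert, and nothing in this commit appears to call it):

```python
# session_demo.py - create a session, log one exchange, read it back.
from session import init_session_db, create_session, log_interaction, get_session_logs

init_session_db()  # idempotent thanks to CREATE TABLE IF NOT EXISTS
sid = create_session("user")
log_interaction(sid, "hello", "working on it...")
print(get_session_logs(sid))  # [('hello', 'working on it...')]
```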