0shotTest / agents.py
from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
from huggingface_hub import HfApi
import re
from typing import List, Dict
import subprocess
import os
import tempfile
import black
import streamlit as st
from pylint import lint
from pylint.reporters.text import TextReporter
from io import StringIO

# Root directory under which workspace projects are created
# (an assumed default; adjust for your environment).
PROJECT_ROOT = os.path.join(os.getcwd(), "projects")

class TextGenerationTool:
    """Thin wrapper around a Hugging Face causal language model for text generation."""

    def __init__(self, llm: str):
        self.llm = llm
        self.tokenizer = AutoTokenizer.from_pretrained(llm)
        self.model = AutoModelForCausalLM.from_pretrained(llm)

    def generate_text(self, prompt: str, max_length: int = 50) -> str:
        inputs = self.tokenizer(prompt, return_tensors="pt")
        outputs = self.model.generate(**inputs, max_length=max_length)
        return self.tokenizer.decode(outputs[0], skip_special_tokens=True)

class AIAgent:
    """An LLM-backed agent with a name, description, and list of skills."""

    def __init__(self, name: str, description: str, skills: List[str], llm: str):
        self.name = name
        self.description = description
        self.skills = skills
        self.text_gen_tool = TextGenerationTool(llm)
        self._hf_api = HfApi()  # Initialize HfApi here

    def generate_agent_response(self, prompt: str) -> str:
        return self.text_gen_tool.generate_text(prompt)

    def create_agent_prompt(self) -> str:
        skills_str = '\n'.join([f"* {skill}" for skill in self.skills])
        agent_prompt = f"""
As an elite expert developer, my name is {self.name}. I possess a comprehensive understanding of the following areas:
{skills_str}
I am confident that I can leverage my expertise to assist you in developing and deploying cutting-edge web applications. Please feel free to ask any questions or present any challenges you may encounter.
"""
        return agent_prompt

    def autonomous_build(self, chat_history: List[tuple[str, str]], workspace_projects: Dict[str, Dict],
                         project_name: str, selected_model: str, hf_token: str) -> tuple[str, str]:
        summary = "Chat History:\n" + "\n".join([f"User: {u}\nAgent: {a}" for u, a in chat_history])
        summary += "\n\nWorkspace Projects:\n" + "\n".join([f"{p}: {details}" for p, details in workspace_projects.items()])
        next_step = "Based on the current state, the next logical step is to implement the main application logic."
        return summary, next_step
    def deploy_built_space_to_hf(self, project_name: str, hf_token: str) -> str:
        """Create (or reuse) a private Streamlit Space and upload the generated app."""
        space_content = generate_space_content(project_name)
        repository = self._hf_api.create_repo(
            repo_id=project_name,
            repo_type="space",
            space_sdk="streamlit",
            private=True,
            token=hf_token,
            exist_ok=True
        )
        self._hf_api.upload_file(
            path_or_fileobj=space_content.encode("utf-8"),
            path_in_repo="app.py",
            repo_id=project_name,
            repo_type="space",
            token=hf_token
        )
        return repository.repo_id

    def has_valid_hf_token(self, hf_token: str) -> bool:
        """Return True if the given Hugging Face token resolves to a user."""
        try:
            return self._hf_api.whoami(token=hf_token) is not None
        except Exception:
            return False
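
# Illustrative usage of AIAgent (a sketch; the model name, skills, and token
# placeholder below are assumptions, not values shipped with this module):
#
#   agent = AIAgent(
#       name="WebDevAgent",
#       description="Full-stack web development assistant",
#       skills=["Python", "Streamlit", "Hugging Face Spaces"],
#       llm="gpt2",
#   )
#   print(agent.create_agent_prompt())
#   summary, next_step = agent.autonomous_build([], {}, "demo_project", "gpt2", "hf_...")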

def process_input(input_text: str) -> str:
    chatbot = pipeline(
        "text-generation",
        model="microsoft/DialoGPT-medium",
        tokenizer="microsoft/DialoGPT-medium",
        clean_up_tokenization_spaces=True
    )
    response = chatbot(input_text, max_length=50, num_return_sequences=1)[0]['generated_text']
    return response


def run_code(code: str) -> str:
    try:
        result = subprocess.run(code, shell=True, capture_output=True, text=True)
        return result.stdout
    except Exception as e:
        return str(e)

def workspace_interface(project_name: str) -> str:
    project_path = os.path.join(PROJECT_ROOT, project_name)
    if "workspace_projects" not in st.session_state:
        st.session_state.workspace_projects = {}
    if not os.path.exists(project_path):
        os.makedirs(project_path)
        st.session_state.workspace_projects[project_name] = {'files': []}
        return f"Project '{project_name}' created successfully."
    else:
        return f"Project '{project_name}' already exists."


def add_code_to_workspace(project_name: str, code: str, file_name: str) -> str:
    project_path = os.path.join(PROJECT_ROOT, project_name)
    if not os.path.exists(project_path):
        return f"Project '{project_name}' does not exist."
    file_path = os.path.join(project_path, file_name)
    with open(file_path, "w") as file:
        file.write(code)
    if "workspace_projects" not in st.session_state:
        st.session_state.workspace_projects = {}
    st.session_state.workspace_projects.setdefault(project_name, {'files': []})['files'].append(file_name)
    return f"Code added to '{file_name}' in project '{project_name}'."


def display_chat_history(chat_history: List[tuple[str, str]]) -> str:
    return "\n".join([f"User: {u}\nAgent: {a}" for u, a in chat_history])


def display_workspace_projects(workspace_projects: Dict[str, Dict]) -> str:
    return "\n".join([f"{p}: {details}" for p, details in workspace_projects.items()])

def generate_space_content(project_name: str) -> str:
    # Logic to generate the Streamlit app content based on project_name
    # ... (This is where you'll need to implement the actual code generation)
    return "import streamlit as st\nst.title('My Streamlit App')\nst.write('Hello, world!')"

def analyze_code(code: str) -> List[str]:
    hints = []
    # Example pointer: Suggest using list comprehensions
    if re.search(r'for .* in .*:\n\s+.*\.append\(', code):
        hints.append("Consider using a list comprehension instead of a loop for appending to a list.")
    # Example pointer: Recommend using f-strings for string formatting
    if re.search(r'".*%s"|\'.*%s\'', code) or re.search(r'".*%d"|\'.*%d\'', code):
        hints.append("Consider using f-strings for cleaner and more efficient string formatting.")
    # Example pointer: Avoid using global variables
    if re.search(r'\bglobal\b', code):
        hints.append("Avoid using global variables. Consider passing parameters or using classes.")
    # Example pointer: Recommend using `with` statement for file operations
    if re.search(r'open\(.+\)', code) and not re.search(r'with open\(.+\)', code):
        hints.append("Consider using the `with` statement when opening files to ensure proper resource management.")
    return hints

def get_code_completion(prompt: str) -> str:
    # Generate a code completion based on the current code input.
    # The model below is a small placeholder choice; swap in your preferred code model.
    code_generator = pipeline("text-generation", model="Salesforce/codegen-350M-mono")
    # Use max_new_tokens instead of max_length so the prompt length does not eat the budget.
    completions = code_generator(prompt, max_new_tokens=50, num_return_sequences=1)
    return completions[0]['generated_text']

def lint_code(code: str) -> List[str]:
    # pylint analyzes files, so write the snippet to a temporary file first.
    pylint_output = StringIO()
    with tempfile.NamedTemporaryFile(mode="w", suffix=".py", delete=False) as tmp:
        tmp.write(code)
        tmp_path = tmp.name
    try:
        # Run pylint on the temporary file and capture its messages via a text reporter.
        lint.Run([tmp_path], reporter=TextReporter(pylint_output), exit=False)
    finally:
        os.remove(tmp_path)
    # Extract pylint messages
    messages = pylint_output.getvalue().splitlines()
    return messages

def format_code(code: str) -> str:
    # Format code using Black
    formatted_code = black.format_str(code, mode=black.FileMode())
    return formatted_code
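

# A quick manual check when running this module directly (a sketch: the sample
# snippet and shell command are illustrative, and the model-backed helpers such
# as process_input or get_code_completion are skipped to avoid downloading
# model weights).
if __name__ == "__main__":
    sample = 'x = []\nfor i in range(3):\n    x.append(i)\nprint("%s" % x)\n'
    for hint in analyze_code(sample):
        print("Hint:", hint)
    print(format_code(sample))
    print(run_code("echo hello"))
    print(display_chat_history([("Hi", "Hello! How can I help?")]))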