import streamlit as st
from streamlit_ace import st_ace
from transformers import pipeline, AutoTokenizer
import os
import importlib
from streamlit_jupyter import StreamlitPatcher
from agents import (
TextGenerationTool,
AIAgent,
process_input,
run_code,
workspace_interface,
add_code_to_workspace,
display_chat_history,
display_workspace_projects,
generate_space_content,
analyze_code,
get_code_completion,
lint_code,
format_code
)
# Patch Streamlit so the app can also run inside a Jupyter notebook
StreamlitPatcher().jupyter()
# Access the Hugging Face API key from Streamlit secrets
hf_token = st.secrets.get("hf_token")
if not hf_token:
    st.error("Hugging Face API key not found. Please make sure it is set in the secrets.")
HUGGING_FACE_REPO_URL = "https://huggingface.co/spaces/acecalisto3/0shotTest"
PROJECT_ROOT = "projects"
AGENT_DIRECTORY = "agents"
AVAILABLE_CODE_GENERATIVE_MODELS = ["bigcode/starcoder", "Salesforce/codegen-350M-mono", "microsoft/CodeGPT-small"]
# Global state to manage communication between Tool Box and Workspace Chat App
if 'chat_history' not in st.session_state:
st.session_state.chat_history = []
if 'terminal_history' not in st.session_state:
st.session_state.terminal_history = []
if 'workspace_projects' not in st.session_state:
st.session_state.workspace_projects = {}
if 'available_agents' not in st.session_state:
st.session_state.available_agents = []
# AI Guide Toggle
ai_guide_level = st.sidebar.radio("AI Guide Level", ["Full Assistance", "Partial Assistance", "No Assistance"])
# Load the CodeGPT tokenizer explicitly
code_generator_tokenizer = AutoTokenizer.from_pretrained("microsoft/CodeGPT-small-py", clean_up_tokenization_spaces=True)
# Load the CodeGPT model for code completion
code_generator = pipeline("text-generation", model="microsoft/CodeGPT-small-py", tokenizer=code_generator_tokenizer)
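# Note: display_ai_guide_chat is used in the AI Guide section below but is not among the
# helpers imported from the agents module. The definition here is a minimal local sketch,
# assuming the guide history is a list of (user_message, guide_reply) tuples.
def display_ai_guide_chat(chat_history):
    """Render the scripted AI guide conversation as simple markdown."""
    for user_message, guide_reply in chat_history:
        st.markdown(f"**You:** {user_message}")
        st.markdown(f"**Guide:** {guide_reply}")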
def main():
st.title("Streamlit Workspace")
    # Load agents once per session; each <name>.py in the agent directory is expected to define a class <name>
    if not st.session_state.available_agents:
        agent_files = [f for f in os.listdir(AGENT_DIRECTORY) if f.endswith(".py") and not f.startswith("__")]
        for agent_file in agent_files:
            module_name = os.path.splitext(agent_file)[0]
            agent_module = importlib.import_module(f"{AGENT_DIRECTORY}.{module_name}")
            agent_class = getattr(agent_module, module_name)
            st.session_state.available_agents.append(agent_class())
# Display the available agents
st.subheader("Available Agents")
for agent in st.session_state.available_agents:
st.write(f"**{agent.name}**: {agent.description}")
# Select an agent
selected_agent = st.selectbox("Select an Agent", [agent.name for agent in st.session_state.available_agents])
current_agent = next((agent for agent in st.session_state.available_agents if agent.name == selected_agent), None)
# Display the agent's prompt
if current_agent:
st.subheader(f"{current_agent.name} Prompt")
st.write(current_agent.create_agent_prompt())
# Workspace Tab
st.subheader("Workspace")
workspace_tabs = st.tabs(["Chat", "Tool Box", "Projects"])
with workspace_tabs[0]:
# Chat Tab
st.subheader("Chat with your Agent")
        user_input = st.text_input("Enter your message:")
        if user_input and current_agent:
            st.session_state.chat_history.append((user_input, current_agent.generate_agent_response(user_input)))
# Display chat history
st.markdown(display_chat_history(st.session_state.chat_history))
# AI Guide
if ai_guide_level != "No Assistance":
st.subheader("AI Guide")
guide_chat_history = []
if ai_guide_level == "Full Assistance":
guide_chat_history.append((
"I'm building a Streamlit app to display data from a CSV file.",
"Great! Let's start by creating a new project in the workspace."
))
guide_chat_history.append((
"Create a new project called 'data_app'.",
"Okay, I've created the project 'data_app'. What would you like to name the main file?"
))
guide_chat_history.append((
"Name it 'app.py'.",
"Alright, I've added an empty 'app.py' file to the 'data_app' project. Now, let's add some code to read the CSV file."
))
guide_chat_history.append((
"Add the following code to 'app.py':\n```python\nimport pandas as pd\nimport streamlit as st\n\ndata = pd.read_csv('data.csv')\nst.write(data)\n```",
"Excellent! Now you can run this code to see the data from your CSV file in the Streamlit app."
))
elif ai_guide_level == "Partial Assistance":
guide_chat_history.append((
"I'm building a Streamlit app to display data from a CSV file.",
"Great! Let's start by creating a new project in the workspace."
))
display_ai_guide_chat(guide_chat_history)
with workspace_tabs[1]:
# Tool Box Tab
st.subheader("Tool Box")
code_input = st_ace(language='python', theme='monokai', key='code_input')
if st.button("Run Code"):
output = run_code(code_input)
st.text_area("Output", output, height=200)
if st.button("Analyze Code"):
hints = analyze_code(code_input)
st.text_area("Hints", "\n".join(hints), height=200)
if st.button("Format Code"):
formatted_code = format_code(code_input)
st_ace(value=formatted_code, language='python', theme='monokai', key='formatted_code')
if st.button("Lint Code"):
lint_messages = lint_code(code_input)
st.text_area("Lint Messages", "\n".join(lint_messages), height=200)
if st.button("Get Code Completion"):
completion = get_code_completion(code_input)
st_ace(value=completion, language='python', theme='monokai', key='code_completion')
with workspace_tabs[2]:
# Projects Tab
st.subheader("Projects")
project_name = st.text_input("Project Name")
if st.button("Create Project"):
message = workspace_interface(project_name)
st.write(message)
file_name = st.text_input("File Name")
code_content = st_ace(language='python', theme='monokai', key='code_content')
if st.button("Add Code to Project"):
message = add_code_to_workspace(project_name, code_content, file_name)
st.write(message)
st.subheader("Workspace Projects")
st.markdown(display_workspace_projects(st.session_state.workspace_projects))
if __name__ == "__main__":
main()