acecalisto3 committed on
Commit
f5a6bc2
·
verified ·
1 Parent(s): 6789a4f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +181 -51
app.py CHANGED
@@ -1,6 +1,7 @@
1
  import streamlit as st
2
  from streamlit_ace import st_ace
3
- from transformers import pipeline, AutoTokenizer
 
4
  import os
5
  import subprocess
6
  import black
@@ -10,21 +11,6 @@ import sys
10
  import re
11
  from typing import List, Dict
12
  from streamlit_jupyter import StreamlitPatcher, tqdm
13
- from .agents import (
14
- TextGenerationTool,
15
- AIAgent,
16
- process_input,
17
- run_code,
18
- workspace_interface,
19
- add_code_to_workspace,
20
- display_chat_history,
21
- display_workspace_projects,
22
- generate_space_content,
23
- analyze_code,
24
- get_code_completion,
25
- lint_code,
26
- format_code
27
- )
28
 
29
  # This line should be at the top of your script
30
  StreamlitPatcher().jupyter() # This patches Streamlit to work in Jupyter
@@ -39,7 +25,7 @@ PROJECT_ROOT = "projects"
39
  AGENT_DIRECTORY = "agents"
40
  AVAILABLE_CODE_GENERATIVE_MODELS = ["bigcode/starcoder", "Salesforce/codegen-350M-mono", "microsoft/CodeGPT-small"]
41
 
42
- # Global state to manage communication between Tool Box and Workspace Chat App
43
  if 'chat_history' not in st.session_state:
44
  st.session_state.chat_history = []
45
  if 'terminal_history' not in st.session_state:
@@ -52,11 +38,147 @@ if 'available_agents' not in st.session_state:
52
  # AI Guide Toggle
53
  ai_guide_level = st.sidebar.radio("AI Guide Level", ["Full Assistance", "Partial Assistance", "No Assistance"])
54
 
55
- # Load the CodeGPT tokenizer explicitly
56
  code_generator_tokenizer = AutoTokenizer.from_pretrained("microsoft/CodeGPT-small-py", clean_up_tokenization_spaces=True)
57
- # Load the CodeGPT model for code completion
58
  code_generator = pipeline("text-generation", model="microsoft/CodeGPT-small-py", tokenizer=code_generator_tokenizer)
59
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
60
  def main():
61
  st.title("Streamlit Workspace")
62
 
@@ -122,51 +244,59 @@ def main():
122
  elif ai_guide_level == "Partial Assistance":
123
  guide_chat_history.append((
124
  "I'm building a Streamlit app to display data from a CSV file.",
125
- "Great! Let's start by creating a new project in the workspace."
126
  ))
127
-
128
- display_ai_guide_chat(guide_chat_history)
129
 
130
  with workspace_tabs[1]:
131
  # Tool Box Tab
132
  st.subheader("Tool Box")
133
- code_input = st_ace(language='python', theme='monokai', key='code_input')
134
- if st.button("Run Code"):
135
- output = run_code(code_input)
136
- st.text_area("Output", output, height=200)
137
-
 
138
  if st.button("Analyze Code"):
139
- hints = analyze_code(code_input)
140
- st.text_area("Hints", "\n".join(hints), height=200)
141
-
142
- if st.button("Format Code"):
143
- formatted_code = format_code(code_input)
144
- st_ace(value=formatted_code, language='python', theme='monokai', key='formatted_code')
145
-
146
- if st.button("Lint Code"):
147
- lint_messages = lint_code(code_input)
148
- st.text_area("Lint Messages", "\n".join(lint_messages), height=200)
149
-
150
  if st.button("Get Code Completion"):
151
- completion = get_code_completion(code_input)
152
- st_ace(value=completion, language='python', theme='monokai', key='code_completion')
 
 
 
 
 
 
 
 
 
 
 
153
 
154
  with workspace_tabs[2]:
155
  # Projects Tab
156
  st.subheader("Projects")
157
- project_name = st.text_input("Project Name")
158
- if st.button("Create Project"):
159
- message = workspace_interface(project_name)
160
- st.write(message)
161
 
162
- file_name = st.text_input("File Name")
163
- code_content = st_ace(language='python', theme='monokai', key='code_content')
164
- if st.button("Add Code to Project"):
165
- message = add_code_to_workspace(project_name, code_content, file_name)
166
- st.write(message)
167
 
168
- st.subheader("Workspace Projects")
169
- st.markdown(display_workspace_projects(st.session_state.workspace_projects))
 
 
 
 
 
170
 
171
  if __name__ == "__main__":
172
  main()
 
1
  import streamlit as st
2
  from streamlit_ace import st_ace
3
+ from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
4
+ from huggingface_hub import HfApi
5
  import os
6
  import subprocess
7
  import black
 
11
  import re
12
  from typing import List, Dict
13
  from streamlit_jupyter import StreamlitPatcher, tqdm
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
14
 
15
  # This line should be at the top of your script
16
  StreamlitPatcher().jupyter() # This patches Streamlit to work in Jupyter
 
25
  AGENT_DIRECTORY = "agents"
26
  AVAILABLE_CODE_GENERATIVE_MODELS = ["bigcode/starcoder", "Salesforce/codegen-350M-mono", "microsoft/CodeGPT-small"]
27
 
28
+ # Global state management
29
  if 'chat_history' not in st.session_state:
30
  st.session_state.chat_history = []
31
  if 'terminal_history' not in st.session_state:
 
38
  # AI Guide Toggle
39
  ai_guide_level = st.sidebar.radio("AI Guide Level", ["Full Assistance", "Partial Assistance", "No Assistance"])
40
 
41
+ # Load the CodeGPT tokenizer and model
42
  code_generator_tokenizer = AutoTokenizer.from_pretrained("microsoft/CodeGPT-small-py", clean_up_tokenization_spaces=True)
 
43
  code_generator = pipeline("text-generation", model="microsoft/CodeGPT-small-py", tokenizer=code_generator_tokenizer)
44
 
45
class TextGenerationTool:
    """Thin wrapper around a Hugging Face causal LM for prompt-driven text generation."""

    def __init__(self, llm: str):
        # Keep the model id for later inspection/debugging.
        self.llm = llm
        self.tokenizer = AutoTokenizer.from_pretrained(llm)
        self.model = AutoModelForCausalLM.from_pretrained(llm)

    def generate_text(self, prompt: str, max_length: int = 50) -> str:
        """Generate a continuation of *prompt*, capped at *max_length* tokens."""
        encoded = self.tokenizer(prompt, return_tensors="pt")
        generated = self.model.generate(**encoded, max_length=max_length)
        first_sequence = generated[0]
        return self.tokenizer.decode(first_sequence, skip_special_tokens=True)
55
+
56
class AIAgent:
    """An AI agent with a name, description and skill set that can generate
    responses, summarize workspace state and deploy generated apps to
    Hugging Face Spaces."""

    def __init__(self, name: str, description: str, skills: List[str], llm: str):
        self.name = name
        self.description = description
        self.skills = skills
        self.text_gen_tool = TextGenerationTool(llm)
        self._hf_api = HfApi()

    def generate_agent_response(self, prompt: str) -> str:
        """Delegate response generation to the underlying text-generation tool."""
        return self.text_gen_tool.generate_text(prompt)

    def create_agent_prompt(self) -> str:
        """Build the persona prompt describing this agent and its skills."""
        skills_str = '\n'.join([f"* {skill}" for skill in self.skills])
        agent_prompt = f"""
As an elite expert developer, my name is {self.name}. I possess a comprehensive understanding of the following areas:
{skills_str}
I am confident that I can leverage my expertise to assist you in developing and deploying cutting-edge web applications. Please feel free to ask any questions or present any challenges you may encounter.
"""
        return agent_prompt

    def autonomous_build(self, chat_history: List[tuple[str, str]], workspace_projects: Dict[str, Dict],
                         project_name: str, selected_model: str, hf_token: str) -> tuple[str, str]:
        """Summarize the current chat/workspace state and propose the next build step.

        Returns a (summary, next_step) pair of plain-text strings.
        """
        summary = "Chat History:\n" + "\n".join([f"User: {u}\nAgent: {a}" for u, a in chat_history])
        summary += "\n\nWorkspace Projects:\n" + "\n".join([f"{p}: {details}" for p, details in workspace_projects.items()])
        next_step = "Based on the current state, the next logical step is to implement the main application logic."
        return summary, next_step

    def deploy_built_space_to_hf(self, project_name: str, hf_token: str = None) -> str:
        """Create (or reuse) a private Streamlit Space and upload the generated app.

        ``hf_token`` is now an explicit parameter — the original referenced an
        undefined global and would raise NameError. Returns the repo id.
        """
        space_content = generate_space_content(project_name)
        repository = self._hf_api.create_repo(
            repo_id=project_name,
            private=True,
            token=hf_token,
            exist_ok=True,
            repo_type="space",  # required by the Hub API when space_sdk is given
            space_sdk="streamlit"
        )
        self._hf_api.upload_file(
            # upload_file treats a plain str as a local file PATH; the generated
            # source must be passed as bytes (or a file object) instead.
            path_or_fileobj=space_content.encode("utf-8"),
            path_in_repo="app.py",
            repo_id=project_name,
            repo_type="space",
            token=hf_token
        )
        # create_repo returns a RepoUrl; its identifier attribute is repo_id
        # (the original accessed a nonexistent .name attribute).
        return repository.repo_id

    def has_valid_hf_token(self, hf_token: str = None) -> bool:
        """Return True when *hf_token* authenticates against the Hugging Face Hub.

        ``whoami`` raises on invalid/expired tokens rather than returning None,
        so failures are caught and reported as False.
        """
        try:
            return self._hf_api.whoami(token=hf_token) is not None
        except Exception:
            return False
103
+
104
def process_input(input_text: str) -> str:
    """Generate a conversational reply to *input_text* with DialoGPT.

    The text-generation pipeline is created lazily on first use and cached on
    the function object — the original rebuilt (and re-downloaded) the model
    pipeline on every single chat message.
    """
    chatbot = getattr(process_input, "_chatbot", None)
    if chatbot is None:
        chatbot = pipeline(
            "text-generation",
            model="microsoft/DialoGPT-medium",
            tokenizer="microsoft/DialoGPT-medium",
            clean_up_tokenization_spaces=True,
        )
        process_input._chatbot = chatbot  # cache for subsequent calls
    response = chatbot(input_text, max_length=50, num_return_sequences=1)[0]['generated_text']
    return response
108
+
109
def run_code(code: str) -> str:
    """Execute *code* as a shell command and return its combined output.

    SECURITY NOTE: ``shell=True`` executes arbitrary user-supplied input; this
    is intentional for the in-app "run" feature but must never be exposed to
    untrusted remote users.

    Returns stdout followed by stderr — the original returned stdout only, so
    failing commands appeared to produce no output at all. If the process
    cannot even be started, the exception text is returned instead.
    """
    try:
        result = subprocess.run(code, shell=True, capture_output=True, text=True)
        # Surface stderr so runtime errors are visible to the user.
        return result.stdout + result.stderr
    except Exception as e:
        return str(e)
115
+
116
def workspace_interface(project_name: str) -> str:
    """Create a new project directory under PROJECT_ROOT and register it in
    the Streamlit session state.

    Returns a human-readable status message; an already-existing project is
    reported rather than raising.
    """
    project_path = os.path.join(PROJECT_ROOT, project_name)
    try:
        # EAFP: avoids the exists()/makedirs() race of the original check.
        os.makedirs(project_path)
    except FileExistsError:
        return f"Project '{project_name}' already exists."
    st.session_state.workspace_projects[project_name] = {'files': []}
    return f"Project '{project_name}' created successfully."
124
+
125
def add_code_to_workspace(project_name: str, code: str, file_name: str) -> str:
    """Write *code* to *file_name* inside an existing project and register the
    file in the session-state project record.

    Returns a status message; a missing project is reported, not raised.
    """
    project_path = os.path.join(PROJECT_ROOT, project_name)
    if not os.path.exists(project_path):
        return f"Project '{project_name}' does not exist."

    file_path = os.path.join(project_path, file_name)
    # Explicit encoding so saved sources round-trip identically on all platforms.
    with open(file_path, "w", encoding="utf-8") as file:
        file.write(code)
    st.session_state.workspace_projects[project_name]['files'].append(file_name)
    return f"Code added to '{file_name}' in project '{project_name}'."
135
+
136
def display_chat_history(chat_history: List[tuple[str, str]]) -> str:
    """Render a list of (user, agent) exchanges as plain text, two lines per exchange."""
    lines = []
    for user_msg, agent_msg in chat_history:
        lines.append(f"User: {user_msg}\nAgent: {agent_msg}")
    return "\n".join(lines)
138
+
139
def display_workspace_projects(workspace_projects: Dict[str, Dict]) -> str:
    """Render the workspace-projects mapping as one "name: details" line per project."""
    rendered = (f"{name}: {details}" for name, details in workspace_projects.items())
    return "\n".join(rendered)
141
+
142
def generate_space_content(project_name: str) -> str:
    """Return the ``app.py`` source for a freshly deployed Space.

    The original ignored *project_name* entirely and emitted an identical app
    for every project; the generated app now titles itself after the project
    so deployed Spaces are distinguishable.
    """
    return (
        "import streamlit as st\n"
        f"st.title('{project_name}')\n"
        "st.write('Hello, world!')"
    )
144
+
145
def analyze_code(code: str) -> List[str]:
    """Scan *code* for common Python anti-patterns and return advisory hints."""
    # Each entry pairs a detector predicate with the hint emitted when it fires;
    # detectors run in order, so hint ordering matches the original.
    checks = (
        (
            lambda src: re.search(r'for .* in .*:\n\s+.*\.append\(', src),
            "Consider using a list comprehension instead of a loop for appending to a list.",
        ),
        (
            lambda src: re.search(r'\".*\%s\"|\'.*\%s\'', src) or re.search(r'\".*\%d\"|\'.*\%d\'', src),
            "Consider using f-strings for cleaner and more efficient string formatting.",
        ),
        (
            lambda src: re.search(r'\bglobal\b', src),
            "Avoid using global variables. Consider passing parameters or using classes.",
        ),
        (
            lambda src: re.search(r'open\(.+\)', src) and not re.search(r'with open\(.+\)', src),
            "Consider using the `with` statement when opening files to ensure proper resource management.",
        ),
    )
    return [hint for detect, hint in checks if detect(code)]
161
+
162
def get_code_completion(prompt: str) -> str:
    """Complete *prompt* using the module-level CodeGPT generator and return the full text."""
    generated = code_generator(prompt, max_new_tokens=50, num_return_sequences=1)
    best = generated[0]
    return best['generated_text']
165
+
166
def lint_code(code: str) -> List[str]:
    """Run pylint over *code* and return its report as a list of message lines.

    The original passed an unsupported ``input=`` kwarg to ``lint.Run`` (a
    TypeError) and left ``sys.stdout`` redirected if pylint raised. This
    version lints through a temporary file and uses ``redirect_stdout`` so
    stdout is always restored, even on error.
    """
    import tempfile
    from contextlib import redirect_stdout

    # pylint wants a real file path; write the snippet to a temp file.
    with tempfile.NamedTemporaryFile(mode="w", suffix=".py",
                                     delete=False, encoding="utf-8") as tmp:
        tmp.write(code)
        tmp_path = tmp.name

    pylint_output = StringIO()
    try:
        with redirect_stdout(pylint_output):
            lint.Run([tmp_path], do_exit=False)
    finally:
        os.remove(tmp_path)

    return pylint_output.getvalue().splitlines()
177
+
178
def format_code(code: str) -> str:
    """Format *code* with black; return the input unchanged if it cannot be parsed.

    The original let ``black.InvalidInput`` propagate, crashing the UI whenever
    the user clicked "Format Code" on syntactically invalid source.
    """
    try:
        return black.format_str(code, mode=black.FileMode())
    except black.InvalidInput:
        # Unparseable user code: hand it back untouched rather than erroring out.
        return code
181
+
182
  def main():
183
  st.title("Streamlit Workspace")
184
 
 
244
  elif ai_guide_level == "Partial Assistance":
245
  guide_chat_history.append((
246
  "I'm building a Streamlit app to display data from a CSV file.",
247
+ "That's a great project! Let me know if you need any help with specific parts of the implementation."
248
  ))
249
+
250
+ st.markdown(display_chat_history(guide_chat_history))
251
 
252
  with workspace_tabs[1]:
253
  # Tool Box Tab
254
  st.subheader("Tool Box")
255
+
256
+ # Code Editor
257
+ st.subheader("Code Editor")
258
+ code = st_ace(language="python", theme="monokai", key="code_editor")
259
+
260
+ # Code Analysis
261
  if st.button("Analyze Code"):
262
+ hints = analyze_code(code)
263
+ for hint in hints:
264
+ st.info(hint)
265
+
266
+ # Code Completion
 
 
 
 
 
 
267
  if st.button("Get Code Completion"):
268
+ completion = get_code_completion(code)
269
+ st.code(completion, language="python")
270
+
271
+ # Code Linting
272
+ if st.button("Lint Code"):
273
+ lint_messages = lint_code(code)
274
+ for message in lint_messages:
275
+ st.warning(message)
276
+
277
+ # Code Formatting
278
+ if st.button("Format Code"):
279
+ formatted_code = format_code(code)
280
+ st.code(formatted_code, language="python")
281
 
282
  with workspace_tabs[2]:
283
  # Projects Tab
284
  st.subheader("Projects")
285
+ st.markdown(display_workspace_projects(st.session_state.workspace_projects))
 
 
 
286
 
287
+ # Create new project
288
+ new_project = st.text_input("Enter new project name:")
289
+ if st.button("Create Project"):
290
+ result = workspace_interface(new_project)
291
+ st.success(result)
292
 
293
+ # Add code to project
294
+ project_name = st.selectbox("Select project", list(st.session_state.workspace_projects.keys()))
295
+ file_name = st.text_input("Enter file name:")
296
+ code_to_add = st.text_area("Enter code:")
297
+ if st.button("Add Code"):
298
+ result = add_code_to_workspace(project_name, code_to_add, file_name)
299
+ st.success(result)
300
 
301
  if __name__ == "__main__":
302
  main()