lvwerra (HF staff) committed
Commit f7a9983 · 1 Parent(s): 085880a
Files changed (2)
  1. app.py +26 -19
  2. utils.py +224 -0
app.py CHANGED
@@ -1,34 +1,41 @@
 import gradio as gr
+import os
+
+from huggingface_hub import InferenceClient
+from e2b_code_interpreter import Sandbox
+
+from utils import run_interactive_notebook
 
-def combine_inputs(title, content):
-    # Create a simple HTML template with the inputs
-    html = f"""
-    <div style="font-family: Arial, sans-serif; max-width: 800px; margin: 0 auto; padding: 20px;">
-        <h1 style="color: #2c3e50;">{title}</h1>
-        <div style="background-color: #f7f9fc; padding: 15px; border-radius: 8px;">
-            <p style="line-height: 1.6;">{content}</p>
-        </div>
-        <footer style="margin-top: 20px; color: #7f8c8d; font-size: 0.9em;">
-            Generated with Gradio
-        </footer>
-    </div>
-    """
-    return html
+# Credentials are read from the environment instead of being hard-coded.
+HF_TOKEN = os.environ["HF_TOKEN"]
+E2B_API_KEY = os.environ["E2B_API_KEY"]
+
+message_history = None
+
+def execute_jupyter_agent(system_prompt, user_input):
+    global message_history
+
+    client = InferenceClient(api_key=HF_TOKEN)
+    max_new_tokens = 512
+    model = "meta-llama/Llama-3.1-8B-Instruct"
+
+    sbx = Sandbox(api_key=E2B_API_KEY)
+
+    messages = [
+        {"role": "system", "content": system_prompt},
+        {"role": "user", "content": user_input},
+    ]
+
+    # Stream partial notebook renders to the UI as the agent works.
+    for notebook_html, messages in run_interactive_notebook(client, model, messages, sbx):
+        message_history = messages
+        yield notebook_html
 
 # Create the interface
 with gr.Blocks() as demo:
     gr.Markdown("# HTML Generator")
 
     with gr.Row():
-        title_input = gr.Textbox(label="Title", placeholder="Enter your title here")
-        content_input = gr.Textbox(label="Content", placeholder="Enter your content here", lines=3)
+        system_input = gr.Textbox(label="System prompt", placeholder="Environment: ipython\nYou are a helpful coding assistant. Always first explain what you are going to do before writing code.")
+        user_input = gr.Textbox(label="User prompt", placeholder="What is 2+1? Use Python to solve.", lines=3)
 
-    generate_btn = gr.Button("Generate HTML")
-    output = gr.HTML(label="Generated HTML")
+    generate_btn = gr.Button("Let's go!")
+    output = gr.HTML(label="Jupyter Notebook")
 
     generate_btn.click(
-        fn=combine_inputs,
-        inputs=[title_input, content_input],
+        fn=execute_jupyter_agent,
+        inputs=[system_input, user_input],
         outputs=output
     )
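The new click handler is a generator, so Gradio streams each yielded notebook render into the gr.HTML output as it arrives. A minimal sketch of running the updated app locally; the entrypoint below is an assumption and is not part of this diff, and it expects HF_TOKEN and E2B_API_KEY to be set in the environment:

# Minimal local-run sketch (assumed, not part of this commit).
# HF_TOKEN and E2B_API_KEY must be exported before the import.
from app import demo

demo.launch()  # standard Gradio entrypoint; serves the Blocks UI locally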
utils.py ADDED
@@ -0,0 +1,224 @@
+import nbformat
+from nbformat.v4 import new_notebook, new_markdown_cell, new_code_cell
+from IPython.display import HTML, display
+from IPython.display import clear_output
+from nbconvert import HTMLExporter
+from huggingface_hub import InferenceClient
+from e2b_code_interpreter import Sandbox
+from transformers import AutoTokenizer
+from traitlets.config import Config
+
+config = Config()
+html_exporter = HTMLExporter(config=config, template_name="classic")
+
+
+def parse_exec_result_nb(execution):
+    """Convert an E2B Execution object to Jupyter notebook cell output format"""
+    outputs = []
+
+    if execution.logs.stdout:
+        outputs.append({
+            'output_type': 'stream',
+            'name': 'stdout',
+            'text': ''.join(execution.logs.stdout)
+        })
+
+    if execution.logs.stderr:
+        outputs.append({
+            'output_type': 'stream',
+            'name': 'stderr',
+            'text': ''.join(execution.logs.stderr)
+        })
+
+    if execution.error:
+        outputs.append({
+            'output_type': 'error',
+            'ename': execution.error.name,
+            'evalue': execution.error.value,
+            'traceback': execution.error.traceback.split('\n')
+        })
+
+    for result in execution.results:
+        output = {
+            'output_type': 'execute_result' if result.is_main_result else 'display_data',
+            'metadata': {},
+            'data': {}
+        }
+
+        if result.text:
+            output['data']['text/plain'] = [result.text]  # Array for text/plain
+        if result.html:
+            output['data']['text/html'] = result.html
+        if result.png:
+            output['data']['image/png'] = result.png
+        if result.svg:
+            output['data']['image/svg+xml'] = result.svg
+        if result.jpeg:
+            output['data']['image/jpeg'] = result.jpeg
+        if result.pdf:
+            output['data']['application/pdf'] = result.pdf
+        if result.latex:
+            output['data']['text/latex'] = result.latex
+        if result.json:
+            output['data']['application/json'] = result.json
+        if result.javascript:
+            output['data']['application/javascript'] = result.javascript
+
+        if result.is_main_result and execution.execution_count is not None:
+            output['execution_count'] = execution.execution_count
+
+        if output['data']:
+            outputs.append(output)
+
+    return outputs
+
+
+system_template = """<div class="alert alert-block alert-info">
+<b>System:</b> {}
+</div>
+"""
+
+user_template = """<div class="alert alert-block alert-success">
+<b>User:</b> {}
+</div>
+"""
+
+def create_base_notebook(messages):
+    base_notebook = {
+        "metadata": {
+            "kernel_info": {"name": "python3"},
+            "language_info": {
+                "name": "python",
+                "version": "3.12",
+            },
+        },
+        "nbformat": 4,
+        "nbformat_minor": 0,
+        "cells": []
+    }
+
+    for message in messages:
+        if message["role"] == "system":
+            text = system_template.format(message["content"].replace('\n', '<br>'))
+        elif message["role"] == "user":
+            text = user_template.format(message["content"])
+        base_notebook["cells"].append({
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": text
+        })
+    return base_notebook
+
+def execute_code(sbx, code):
+    execution = sbx.run_code(code, on_stdout=lambda data: print('stdout:', data))
+    output = ""
+    if len(execution.logs.stdout) > 0:
+        output += "\n".join(execution.logs.stdout)
+    if len(execution.logs.stderr) > 0:
+        output += "\n".join(execution.logs.stderr)
+    if execution.error is not None:
+        output += execution.error.traceback
+    return output, execution
+
+
+def parse_exec_result_llm(execution):
+    output = ""
+    if len(execution.logs.stdout) > 0:
+        output += "\n".join(execution.logs.stdout)
+    if len(execution.logs.stderr) > 0:
+        output += "\n".join(execution.logs.stderr)
+    if execution.error is not None:
+        output += execution.error.traceback
+    return output
+
+
+def update_notebook_display(notebook_data):
+    notebook = nbformat.from_dict(notebook_data)
+    notebook_body, _ = html_exporter.from_notebook_node(notebook)
+    return notebook_body
+
+def run_interactive_notebook(client, model, messages, sbx, max_new_tokens=512):
+    notebook_data = create_base_notebook(messages)
+    try:
+        code_cell_counter = 0
+        while True:
+            response_stream = client.chat.completions.create(
+                model=model,
+                messages=messages,
+                logprobs=True,
+                stream=True,
+                max_tokens=max_new_tokens,
+            )
+
+            assistant_response = ""
+            tokens = []
+            current_cell_content = []
+
+            code_cell = False
+            for i, chunk in enumerate(response_stream):
+                content = chunk.choices[0].delta.content
+                tokens.append(chunk.choices[0].logprobs.content[0].token)
+                assistant_response += content
+                current_cell_content.append(content)
+
+                # The first token decides the cell type: Llama 3.1 prefixes
+                # tool calls with <|python_tag|>, so that marks a code cell.
+                if len(tokens) == 1:
+                    create_cell = True
+                    code_cell = "<|python_tag|>" in tokens[0]
+                    if code_cell:
+                        code_cell_counter += 1
+                else:
+                    create_cell = False
+
+                # Update notebook in real-time
+                if create_cell:
+                    if "<|python_tag|>" in tokens[0]:
+                        notebook_data["cells"].append({
+                            "cell_type": "code",
+                            "execution_count": None,
+                            "metadata": {},
+                            "source": assistant_response,
+                            "outputs": []
+                        })
+                    else:
+                        notebook_data["cells"].append({
+                            "cell_type": "markdown",
+                            "metadata": {},
+                            "source": assistant_response
+                        })
+                else:
+                    notebook_data["cells"][-1]["source"] = assistant_response
+                if i % 8 == 0:
+                    yield update_notebook_display(notebook_data), messages
+            yield update_notebook_display(notebook_data), messages
+
+            # Handle code execution
+            if code_cell:
+                notebook_data["cells"][-1]["execution_count"] = code_cell_counter
+
+                exec_result, execution = execute_code(sbx, assistant_response)
+                messages.append({
+                    "role": "assistant",
+                    "content": assistant_response,
+                    "tool_calls": [{
+                        "type": "function",
+                        "function": {
+                            "name": "code_interpreter",
+                            "arguments": {"code": assistant_response}
+                        }
+                    }]
+                })
+                messages.append({"role": "ipython", "content": parse_exec_result_llm(execution)})
+
+                # Update the last code cell with execution results
+                notebook_data["cells"][-1]["outputs"] = parse_exec_result_nb(execution)
+                update_notebook_display(notebook_data)
+            else:
+                messages.append({"role": "assistant", "content": assistant_response})
+                # <|eot_id|> marks the end of the assistant's final answer.
+                if tokens[-1] == "<|eot_id|>":
+                    break
+    finally:
+        sbx.kill()
+
+    yield update_notebook_display(notebook_data), messages
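
For reference, a minimal sketch of driving run_interactive_notebook outside Gradio and saving the final render to disk. It mirrors what app.py does above and assumes valid HF_TOKEN / E2B_API_KEY environment variables; the sandbox is killed inside the function's finally block, so no extra cleanup is needed:

import os

from huggingface_hub import InferenceClient
from e2b_code_interpreter import Sandbox

from utils import run_interactive_notebook

client = InferenceClient(api_key=os.environ["HF_TOKEN"])
sbx = Sandbox(api_key=os.environ["E2B_API_KEY"])

messages = [
    {"role": "system", "content": "Environment: ipython\nYou are a helpful coding assistant. Always first explain what you are going to do before writing code."},
    {"role": "user", "content": "What is 2+1? Use Python to solve."},
]

notebook_html = None
# Each iteration yields (rendered_html, updated_messages); keep the last render.
for notebook_html, messages in run_interactive_notebook(
    client, "meta-llama/Llama-3.1-8B-Instruct", messages, sbx
):
    pass

with open("notebook.html", "w") as f:
    f.write(notebook_html)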