e-hossam96 committed on
Commit
28e33b6
Β·
1 Parent(s): 31d34d9

added UI demo

Browse files
Files changed (1) hide show
  1. main.py +245 -3
main.py CHANGED
@@ -1,6 +1,248 @@
1
- def main():
2
- print("Hello from ai-peer-conversation!")
 
 
 
3
 
4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5
  if __name__ == "__main__":
6
- main()
 
1
+ import time
2
+ import openai
3
+ import random
4
+ import gradio as gr
5
+ from datetime import datetime
6
 
7
 
8
# --- Local LLM connection settings --------------------------------------
# Ollama's OpenAI-compatible server does not check the API key, but the
# openai client requires a non-empty value, hence the placeholder.
OPENAI_API_KEY = "ollama"
# NOTE(review): Ollama's OpenAI-compatible endpoint is normally served
# under "/v1" (i.e. "http://localhost:11434/v1") — confirm before this
# client is actually wired in; it is currently unused (the demo below
# relies on the mock `generate_ai_response`).
OPENAI_API_BASE = "http://localhost:11434"
MODEL_NAME = "llama3.1:8b-instruct-q2_K"

# Async OpenAI-protocol client pointed at the local Ollama server.
# Constructed at import time; not referenced anywhere else in this file yet.
llm_client = openai.AsyncClient(
    api_key=OPENAI_API_KEY,
    base_url=OPENAI_API_BASE,
)
16
+
17
+
18
# Mock AI response function (replace with your actual AI integration)
def generate_ai_response(message, system_message, agent_name):
    """Return a canned reply for the named agent.

    Stand-in for a real LLM call: sleeps briefly to mimic model latency,
    then picks one of two templated sentences at random.
    `system_message` is accepted for interface parity but not used here.
    """
    # Simulate thinking time
    time.sleep(0.1)

    # Both templates embed the lower-cased incoming message as the topic.
    topic = message.lower()

    if agent_name == "Agent A":
        candidates = [
            f"As Agent A, I think {topic} raises interesting points about efficiency.",
            f"From my perspective as Agent A, I'd like to explore {topic} further.",
        ]
    else:
        candidates = [
            f"Agent B perspective: {topic} presents some challenges we should address.",
            f"As Agent B, I have a different view on {topic} - let me elaborate.",
        ]

    return random.choice(candidates)
40
+
41
+
42
def start_conversation(system_msg_a, system_msg_b, num_turns=5):
    """Orchestrate the conversation between two AI agents.

    Generator: yields the running conversation — a list of
    (speaker, message) tuples — after every utterance so the UI can
    render it incrementally.

    Args:
        system_msg_a: system prompt for Agent A (must be non-blank).
        system_msg_b: system prompt for Agent B (must be non-blank).
        num_turns: number of A/B exchange rounds (default 5).

    Yields:
        list[tuple[str, str]]: the conversation so far, growing each yield.
    """
    if not system_msg_a.strip() or not system_msg_b.strip():
        # BUG FIX: this was `return [...]`. Inside a generator, `return`
        # only sets the StopIteration value, so callers iterating the
        # generator never saw this warning. Yield it instead, then stop.
        yield [
            (
                "System",
                "⚠️ Please provide system messages for both agents before starting the conversation.",
            )
        ]
        return

    conversation = []
    conversation.append(
        (
            "System",
            f"πŸ€– **Conversation Started**\n\n**Agent A System Message:** {system_msg_a}\n\n**Agent B System Message:** {system_msg_b}\n\n---",
        )
    )

    # NOTE: the original seeded `current_message` with a greeting here, but
    # turn 0 uses a fixed opener and later turns overwrite it with Agent B's
    # reply before it is ever read — the assignment was dead and is removed.
    current_message = None

    for turn in range(num_turns):
        # Agent A speaks: fixed opener on the first turn, otherwise a
        # response to Agent B's latest message.
        if turn == 0:
            agent_a_response = generate_ai_response(
                "starting our conversation", system_msg_a, "Agent A"
            )
        else:
            agent_a_response = generate_ai_response(
                current_message, system_msg_a, "Agent A"
            )

        conversation.append(("Agent A", agent_a_response))
        yield conversation
        time.sleep(0.5)  # Brief pause for visual effect

        # Agent B responds to what Agent A just said.
        agent_b_response = generate_ai_response(
            agent_a_response, system_msg_b, "Agent B"
        )
        conversation.append(("Agent B", agent_b_response))
        current_message = agent_b_response
        yield conversation
        time.sleep(0.5)  # Brief pause for visual effect

    conversation.append(("System", "🏁 **Conversation Complete**"))
    yield conversation
93
+
94
+
95
def format_conversation_for_display(conversation):
    """Convert (speaker, message) tuples into Gradio chatbot pairs.

    Agent A messages fill the left slot of each pair; Agent B and System
    messages fill the right slot, matching the two-sided chat layout.
    """
    return [
        (message, None) if speaker == "Agent A" else (None, message)
        for speaker, message in conversation
    ]
108
+
109
+
110
def save_conversation(conversation):
    """Save the conversation to a timestamped text file in the working dir.

    Args:
        conversation: list of (speaker, message) tuples; may be empty.

    Returns:
        (filename, status_message) — filename is None when there was
        nothing to save.
    """
    if not conversation:
        return None, "No conversation to save."

    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    filename = f"ai_conversation_{timestamp}.txt"

    content = "AI-to-AI Conversation Log\n"
    content += f"Generated on: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n"
    content += "=" * 50 + "\n\n"

    for speaker, message in conversation:
        if speaker == "System":
            content += f"[SYSTEM] {message}\n\n"
        else:
            content += f"{speaker}: {message}\n\n"

    # Save to file
    with open(filename, "w", encoding="utf-8") as f:
        f.write(content)

    # BUG FIX: the status message lacked the filename placeholder
    # (it read 'Conversation saved as (unknown)'); interpolate it.
    return filename, f"Conversation saved as {filename}"
135
+
136
+
137
# Custom CSS for better visual appeal
# NOTE(review): these reads run at import time and raise FileNotFoundError
# if the static/ assets are missing — intentional fail-fast for the demo.
with open("static/styles.css") as css_file:
    custom_css = css_file.read()

with open("static/index.html") as html_file:
    custom_html = html_file.read()


# Create the Gradio interface: tab 0 collects the two system prompts and
# turn count; tab 1 streams the live conversation and offers a download.
with gr.Blocks(css=custom_css, title="AI-to-AI Conversation Interface") as demo:
    gr.HTML(custom_html)

    # Store conversation data (list of (speaker, message) tuples) so the
    # save handler can access the finished conversation across events.
    conversation_data = gr.State([])

    with gr.Tabs() as tabs:
        with gr.TabItem("πŸš€ Setup & Start Conversation", id=0) as setup_tab:
            with gr.Row():
                with gr.Column(scale=1):
                    gr.HTML(
                        "<h3 style='color: #667eea; margin-bottom: 15px;'>πŸ€– Agent A System Message</h3>"
                    )
                    system_msg_a = gr.Textbox(
                        placeholder="Enter the system message for Agent A (e.g., 'You are a helpful assistant focused on creative solutions...')",
                        lines=4,
                        label="",
                        elem_classes=["system-input"],
                    )

                with gr.Column(scale=1):
                    gr.HTML(
                        "<h3 style='color: #764ba2; margin-bottom: 15px;'>πŸ€– Agent B System Message</h3>"
                    )
                    system_msg_b = gr.Textbox(
                        placeholder="Enter the system message for Agent B (e.g., 'You are an analytical assistant focused on logical reasoning...')",
                        lines=4,
                        label="",
                        elem_classes=["system-input"],
                    )

            with gr.Row():
                with gr.Column(scale=1):
                    num_turns = gr.Slider(
                        minimum=1,
                        maximum=10,
                        value=5,
                        step=1,
                        label="Number of conversation turns",
                        info="How many back-and-forth exchanges between agents",
                    )

            with gr.Row():
                start_btn = gr.Button(
                    "πŸš€ Start AI-to-AI Conversation",
                    size="lg",
                    elem_classes=["start-button"],
                )

        with gr.TabItem("πŸ’¬ Live Conversation", id=1) as conversation_tab:
            gr.HTML(
                "<h3 style='text-align: center; color: #444; margin-bottom: 20px;'>Watch the AI agents converse in real-time</h3>"
            )

            chatbot = gr.Chatbot(
                label="AI-to-AI Conversation",
                height=600,
                elem_classes=["conversation-box"],
                avatar_images=("πŸ€–", "πŸ”·"),
            )

            with gr.Row():
                save_btn = gr.Button(
                    "πŸ’Ύ Save Conversation", elem_classes=["save-button"]
                )
                # Hidden until a save succeeds (see handle_save below).
                download_file = gr.File(label="Download Conversation", visible=False)

            status_msg = gr.Textbox(label="Status", interactive=False)

    # Event handlers
    def run_conversation(sys_a, sys_b, turns):
        # Generator handler: streams each conversation snapshot to the
        # chatbot while also updating the conversation_data state.
        for conv in start_conversation(sys_a, sys_b, turns):
            formatted = format_conversation_for_display(conv)
            yield formatted, conv

    def start_and_switch_tab(sys_a, sys_b, turns):
        """Start conversation and switch to conversation tab"""
        # Resets the stored conversation and selects tab id=1; the actual
        # generation happens in the chained run_conversation call below.
        return gr.Tabs(selected=1), [], "πŸš€ Starting conversation..."

    # First switch tabs / reset state, then stream the conversation.
    start_btn.click(
        fn=start_and_switch_tab,
        inputs=[system_msg_a, system_msg_b, num_turns],
        outputs=[tabs, conversation_data, status_msg],
    ).then(
        fn=run_conversation,
        inputs=[system_msg_a, system_msg_b, num_turns],
        outputs=[chatbot, conversation_data],
    )

    def handle_save(conversation):
        # Persist the stored conversation; reveal the download widget only
        # when a file was actually written.
        filename, message = save_conversation(conversation)
        if filename:
            return gr.update(visible=True, value=filename), message
        else:
            return gr.update(visible=False), message

    save_btn.click(
        fn=handle_save, inputs=[conversation_data], outputs=[download_file, status_msg]
    )

# Launch the interface
if __name__ == "__main__":
    # 0.0.0.0 exposes the demo on all interfaces (container-friendly).
    demo.launch(server_name="0.0.0.0", server_port=7860, show_error=True)