Spaces:
Sleeping
Sleeping
import os | |
from dotenv import load_dotenv | |
load_dotenv() # Load environment variables from .env file | |
import gradio as gr | |
from threading import Thread | |
import tiktoken | |
import logging | |
from src.config import Config | |
from src.logger import Logger | |
from src.project import ProjectManager | |
from src.state import AgentState | |
from src.agents import Agent | |
# Initialize core components | |
manager = ProjectManager() | |
AgentState = AgentState() | |
config = Config() | |
logger = Logger() | |
TIKTOKEN_ENC = tiktoken.get_encoding("cl100k_base") | |
# Configure logging | |
logging.basicConfig(level=logging.INFO) | |
logger = logging.getLogger(__name__) | |
def process_message(message, base_model="gpt-3.5-turbo", project_name="default", search_engine="duckduckgo"): | |
try: | |
agent = Agent(base_model=base_model, search_engine=search_engine.lower()) | |
state = AgentState.get_latest_state(project_name) | |
if not state: | |
agent.execute(message, project_name) | |
else: | |
if AgentState.is_agent_completed(project_name): | |
agent.subsequent_execute(message, project_name) | |
else: | |
agent.execute(message, project_name) | |
# Get the latest messages | |
messages = manager.get_messages(project_name) | |
return messages[-1]["message"] if messages else "No response generated" | |
except Exception as e: | |
logger.error(f"Error processing message: {str(e)}") | |
return f"An error occurred: {str(e)}" | |
def create_gradio_interface(): | |
with gr.Blocks(title="Devika AI Assistant", theme=gr.themes.Soft()) as interface: | |
gr.Markdown(""" | |
# π€ Devika AI Assistant | |
Devika is an advanced AI coding assistant that helps you with: | |
- Writing and debugging code | |
- Creating new projects | |
- Answering programming questions | |
- And much more! | |
Simply type your request below and Devika will help you out. | |
""") | |
with gr.Row(): | |
with gr.Column(scale=2): | |
message_input = gr.Textbox( | |
label="Your Message", | |
placeholder="Type your coding request here...", | |
lines=3 | |
) | |
with gr.Row(): | |
model_dropdown = gr.Dropdown( | |
choices=["gpt-3.5-turbo", "gpt-4", "claude-3-opus"], | |
value="gpt-3.5-turbo", | |
label="Model" | |
) | |
search_engine_dropdown = gr.Dropdown( | |
choices=["DuckDuckGo", "Bing", "Google"], | |
value="DuckDuckGo", | |
label="Search Engine" | |
) | |
submit_btn = gr.Button("Send Message", variant="primary") | |
with gr.Column(scale=3): | |
output_box = gr.Markdown(label="Devika's Response") | |
# Add examples | |
gr.Examples( | |
examples=[ | |
["Create a React component for a todo list", "gpt-3.5-turbo", "DuckDuckGo"], | |
["Help me understand how to use Python decorators", "gpt-3.5-turbo", "DuckDuckGo"], | |
["Write a Node.js API endpoint for user authentication", "gpt-3.5-turbo", "DuckDuckGo"] | |
], | |
inputs=[message_input, model_dropdown, search_engine_dropdown], | |
outputs=output_box, | |
fn=lambda x, y, z: process_message(x, y, "default", z), | |
cache_examples=True | |
) | |
submit_btn.click( | |
fn=process_message, | |
inputs=[message_input, model_dropdown, gr.Textbox(value="default", visible=False), search_engine_dropdown], | |
outputs=output_box | |
) | |
return interface | |
# Create and launch the Gradio interface | |
interface = create_gradio_interface() | |
if __name__ == "__main__": | |
interface.launch( | |
server_name="0.0.0.0", | |
server_port=7860, | |
share=False, | |
debug=False | |
) |