from huggingface_hub import InferenceClient
import gradio as gr
import random
import os
import sys
import json
from typing import List, Dict

# Import prompt templates from the local logx package
from logx.prompts import (
    createLlamaPrompt,
    createSpace,
    isPythonOrGradioAppPrompt,
    isReactAppPrompt,
    isStreamlitAppPrompt,
    getWebApp,
    getGradioApp,
    getReactApp,
    getStreamlitApp,
    parseTutorial,
    generateFiles,
)
from agent import Agent
# Local types module in this repository (shadows the stdlib `types` module)
from types import (
    Code,
    Prompt,
    AppType,
    File,
    Space,
    Tutorial,
    App,
    WebApp,
    GradioApp,
    StreamlitApp,
    ReactApp,
)
client = InferenceClient(
    "mistralai/Mixtral-8x7B-Instruct-v0.1"
)

# NOTE: `agents` is referenced below but was never defined in the original file.
# These are placeholder names only -- replace them with the real agent/system
# prompts used by this Space.
agents = [
    "WEB_DEV",
    "SYS_ADMIN",
    "PYTHON_CODE_DEV",
    "GAME_DEV",
    "DATABASE_DEV",
]
def run():
    # Placeholder outputs for the simple demo interface
    text_output = "Some text output"
    # gr.Chatbot expects a list of (user, assistant) message pairs
    chatbot_output = [("", "Chatbot response")]
    return text_output, chatbot_output

# `run` takes no arguments, so no input components are needed
interface = gr.Interface(fn=run, inputs=None, outputs=[gr.Textbox(), gr.Chatbot()])
# Define the main function
def main():
    """
    Main function that orchestrates the code generation process.
    """
    # Load the prompt templates from logx.prompts
    prompts = load_prompts()
    # Initialize an Agent instance
    agent = Agent(prompts)
    # Get the user's input
    user_input = input("Enter your prompt: ")
    # Process the user's input
    result = agent.process(user_input)
    # Print the result
    print(result)
# Function to build the prompt registry
def load_prompts():
    """
    Loads the prompt templates imported from logx.prompts into a dictionary.
    """
    prompts = {
        "createLlamaPrompt": createLlamaPrompt,
        "createSpace": createSpace,
        "isPythonOrGradioAppPrompt": isPythonOrGradioAppPrompt,
        "isReactAppPrompt": isReactAppPrompt,
        "isStreamlitAppPrompt": isStreamlitAppPrompt,
        "getWebApp": getWebApp,
        "getGradioApp": getGradioApp,
        "getReactApp": getReactApp,
        "getStreamlitApp": getStreamlitApp,
        "parseTutorial": parseTutorial,
        "generateFiles": generateFiles,
    }
    return prompts
def create_prompt(app_type: str, app_name: str, app_description: str, app_features: List[str], app_dependencies: List[str], app_space: str, app_tutorial: str) -> str:
    # Assemble a single instruction prompt describing the application to generate
    prompt = f"""
I need you to help me create a {app_type} web application.
The application name is: {app_name}
The application description is: {app_description}
The application features are: {app_features}
The application dependencies are: {app_dependencies}
The application space is: {app_space}
The application tutorial is: {app_tutorial}
Please generate the code for the application.
"""
    return prompt
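
# Example usage of create_prompt (illustrative values only, not part of the Space):
# create_prompt(
#     app_type="Gradio",
#     app_name="image-caption-demo",
#     app_description="Captions uploaded images",
#     app_features=["image upload", "caption generation"],
#     app_dependencies=["gradio", "transformers"],
#     app_space="my-username/image-caption-demo",
#     app_tutorial="",
# )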
def format_prompt(message, history):
    # Build a Mixtral-Instruct style prompt from the chat history
    prompt = "<s>"
    for user_prompt, bot_response in history:
        prompt += f"[INST] {user_prompt} [/INST]"
        prompt += f" {bot_response}</s> "
    prompt += f"[INST] {message} [/INST]"
    return prompt
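
# Example (given the function above):
#   format_prompt("How do I sort a list?", [("Hi", "Hello!")])
# returns:
#   "<s>[INST] Hi [/INST] Hello!</s> [INST] How do I sort a list? [/INST]"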
def generate(
    prompt, history, agent_name=agents[0], sys_prompt="", temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,
):
    seed = random.randint(1, 1111111111111111)
    # Combine the selected agent name with any extra system prompt text
    system_prompt = f"{agent_name} {sys_prompt}".strip()
    temperature = float(temperature)
    if temperature < 1e-2:
        temperature = 1e-2
    top_p = float(top_p)
    generate_kwargs = dict(
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
        seed=seed,
    )
    # Stream tokens from the Inference API and yield the growing response
    formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
    stream = client.text_generation(
        formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False
    )
    output = ""
    for response in stream:
        output += response.token.text
        yield output
additional_inputs = [
    gr.Dropdown(
        label="Agents",
        choices=list(agents),
        value=agents[0],
        interactive=True,
    ),
    gr.Textbox(
        label="System Prompt",
        max_lines=1,
        interactive=True,
    ),
    gr.Slider(
        label="Temperature",
        value=0.9,
        minimum=0.0,
        maximum=1.0,
        step=0.05,
        interactive=True,
        info="Higher values produce more diverse outputs",
    ),
    gr.Slider(
        label="Max new tokens",
        value=1000 * 10,  # default capped at the slider maximum
        minimum=0,
        maximum=1000 * 10,
        step=64,
        interactive=True,
        info="The maximum number of new tokens",
    ),
    gr.Slider(
        label="Top-p (nucleus sampling)",
        value=0.90,
        minimum=0.0,
        maximum=1.0,
        step=0.05,
        interactive=True,
        info="Higher values sample more low-probability tokens",
    ),
    gr.Slider(
        label="Repetition penalty",
        value=1.2,
        minimum=1.0,
        maximum=2.0,
        step=0.05,
        interactive=True,
        info="Penalize repeated tokens",
    ),
]
# Each example supplies the message plus the six additional inputs (None = use the default)
examples = [
    ["Create a simple web application using Flask", agents[0], None, None, None, None, None],
    ["Generate a Python script to perform a linear regression analysis", agents[2], None, None, None, None, None],
    ["Create a Dockerfile for a Node.js application", agents[1], None, None, None, None, None],
    ["Write a shell script to automate the deployment of a web application to a server", agents[3], None, None, None, None, None],
    ["Generate a SQL query to retrieve the top 10 most popular products by sales", agents[4], None, None, None, None, None],
    ["Write a Python script to generate a random password with a given length and complexity", agents[2], None, None, None, None, None],
    ["Create a simple game in Unity using C#", agents[0], None, None, None, None, None],
    ["Generate a Java program to implement a binary search algorithm", agents[2], None, None, None, None, None],
    ["Write a shell script to monitor the CPU usage of a server", agents[1], None, None, None, None, None],
    ["Create a simple web application using React and Node.js", agents[0], None, None, None, None, None],
    ["Generate a Python script to perform a sentiment analysis on a given text", agents[2], None, None, None, None, None],
    ["Write a shell script to automate the backup of a MySQL database", agents[1], None, None, None, None, None],
    ["Create a simple game in Unreal Engine using C++", agents[3], None, None, None, None, None],
    ["Generate a Java program to implement a bubble sort algorithm", agents[2], None, None, None, None, None],
    ["Write a shell script to monitor the memory usage of a server", agents[1], None, None, None, None, None],
    ["Create a simple web application using Angular and Node.js", agents[0], None, None, None, None, None],
    ["Generate a Python script to perform a text classification on a given dataset", agents[2], None, None, None, None, None],
    ["Write a shell script to automate the installation of a software package on a server", agents[1], None, None, None, None, None],
    ["Create a simple game in Godot using GDScript", agents[3], None, None, None, None, None],
    ["Generate a Java program to implement a merge sort algorithm", agents[2], None, None, None, None, None],
    ["Write a shell script to automate the cleanup of temporary files on a server", agents[1], None, None, None, None, None],
]
# Build and launch the chat UI (note: this runs at import time, before main())
gr.ChatInterface(
    fn=generate,
    chatbot=gr.Chatbot(show_label=False, show_share_button=False, show_copy_button=True, likeable=True, layout="panel"),
    additional_inputs=additional_inputs,
    title="Mixtral 46.7B",
    examples=examples,
    concurrency_limit=20,
).launch(show_api=False)
# Run the main function if the script is executed directly
if __name__ == "__main__":
    main()