# Debate_Master / app.py
from huggingface_hub import InferenceClient
import gradio as gr
import base64
import datetime
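
# Hosted serverless inference client. Note: Meta-Llama-3-8B-Instruct is gated on the
# Hub, so the Space may need an HF token with access to the model (e.g. a Space secret).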
client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
# Debate response function
def debate_respond(message, history: list[tuple[str, str]], topic, position,
                   max_tokens=1024, temperature=0.4, top_p=0.95):
    # System message defining assistant behavior in a debate;
    # topic and position arrive via the ChatInterface's additional_inputs
    system_message = {
        "role": "system",
        "content": f"Act as a debate participant taking the position '{position}' on the topic '{topic}'. "
                   "Respond professionally, thoughtfully, and convincingly, staying within the specified role. "
                   "If the user's point challenges your position, provide a counterargument. "
                   "Maintain a respectful tone throughout the discussion."
    }
    messages = [system_message]

    # Adding conversation history
    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})

    # Adding the current user input
    messages.append({"role": "user", "content": message})
    # Generating the response (streamed token by token)
    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        # Some stream chunks carry no text (e.g. the final chunk), so guard against None
        response += chunk.choices[0].delta.content or ""
        yield response

    print(f"{datetime.datetime.now()}::{messages[-1]['content']}->{response}\n")
# Encode image function for logos (optional, kept for design)
def encode_image(image_path):
    with open(image_path, "rb") as image_file:
        return base64.b64encode(image_file.read()).decode('utf-8')
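# Illustrative only (this helper is not called in the current UI): it could embed a
# local logo in the page, e.g.
#   gr.HTML(f'<img src="data:image/png;base64,{encode_image("logo.png")}" height="40">')
# where "logo.png" is a hypothetical image file bundled with the Space.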
# Gradio interface
with gr.Blocks(theme=gr.themes.Ocean(font=[gr.themes.GoogleFont("Roboto Mono")]),
               css='footer {visibility: hidden}') as demo:
    gr.Markdown("# LLM Debate Participant")
    with gr.Tabs():
        with gr.TabItem("Debate Interface"):
            with gr.Row():
                topic = gr.Textbox(label="Debate Topic", placeholder="Enter the topic of the debate")
                position = gr.Radio(["For", "Against"], label="Position", info="LLM's debate stance")
            chatbot = gr.Chatbot(height=500)
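            # ChatInterface streams debate_respond's output into the chatbot above;
            # the topic and position components are forwarded to it via additional_inputs.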
            debate_interface = gr.ChatInterface(
                debate_respond,
                chatbot=chatbot,
                additional_inputs=[topic, position],
                examples=[
                    "Why do you support this stance?",
                    "Can you refute the opposing view on this topic?",
                    "What evidence supports your position?"
                ]
            )
    gr.HTML("<footer><p>LLM Debate Participant © 2024</p></footer>")
if __name__ == "__main__":
    demo.launch(share=True)