File size: 4,376 Bytes
45a9357 70c440e 45a9357 70c440e 45a9357 70c440e 45a9357 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 |
import os
from dataclasses import dataclass
from typing import Optional

import gradio as gr
import openai
@dataclass
class Args:
    """Sampling/decoding parameters forwarded to the chat-completions API.

    `None`-valued fields are dropped before the request is sent (see
    `get_completion`), so the API default applies for them.
    """

    frequency_penalty: float = 0.0
    max_tokens: int = 32
    n: int = 1
    presence_penalty: float = 0.0
    seed: int = 42
    # Fixed annotation: was `str = None`, which contradicts the default.
    stop: Optional[str] = None
    stream: bool = False
    temperature: float = 0.8
    top_p: float = 0.95
def get_completion(client, model_id, messages, args):
    """Call the chat-completions endpoint; return the response, or None on failure.

    Parameters:
        client: an OpenAI-compatible client exposing `chat.completions.create`.
        model_id: model identifier string sent with the request.
        messages: OpenAI-format message dicts.
        args: object carrying the sampling parameters (see `Args`).
    """
    params = dict(
        model=model_id,
        messages=messages,
        frequency_penalty=args.frequency_penalty,
        max_tokens=args.max_tokens,
        n=args.n,
        presence_penalty=args.presence_penalty,
        seed=args.seed,
        stop=args.stop,
        stream=args.stream,
        temperature=args.temperature,
        top_p=args.top_p,
    )
    # Drop unset (None) options so the API applies its own defaults.
    request = {key: value for key, value in params.items() if value is not None}
    try:
        return client.chat.completions.create(**request)
    except Exception as exc:  # best-effort: callers treat None as "call failed"
        print(f"Error during API call: {exc}")
        return None
def chat_response(message, history, model):
    """Generate one assistant reply for a single chat pane.

    Parameters:
        message: the new user message text.
        history: list of (user_msg, assistant_msg) pairs from the Gradio chatbot.
        model: pane label ("A"/"B"); note it does NOT select the backend model —
            both panes hit the same served model.

    Returns:
        The assistant's reply text, or an error string when the call fails.
    """
    # SECURITY NOTE(review): hard-coded credential checked into source; move it
    # to an environment variable / secret store before shipping.
    openai_api_key = "super-secret-token"
    os.environ['OPENAI_API_KEY'] = openai_api_key
    # Legacy module-level settings kept for compatibility; the explicit client
    # below is what actually carries the key and base URL.
    openai.api_key = openai_api_key
    openai.api_base = "https://turingtest--example-vllm-openai-compatible-serve.modal.run/v1"
    client = openai.OpenAI(api_key=openai_api_key, base_url=openai.api_base)

    # Build the OpenAI-format message list from the Gradio tuple history.
    messages = [{"role": "system", "content": "You are a helpful assistant."}]
    for user_msg, assistant_msg in history:
        messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    args = Args()
    model_id = "meta-llama/Meta-Llama-3.1-8B-Instruct"

    response = get_completion(client, model_id, messages, args)
    if response and response.choices:
        return response.choices[0].message.content
    # Fixed: dropped the pointless f-prefix on a placeholder-free literal.
    return "Error: Please retry or contact support if retried more than twice."
def create_chat_interface(model):
    """Build one labelled chat pane whose replies come from `chat_response`.

    `model` is the pane tag ("A"/"B") shown in the chatbot label and passed
    through to `chat_response`.
    """
    def respond(message, history):
        return chat_response(message, history, model)

    return gr.ChatInterface(
        fn=respond,
        chatbot=gr.Chatbot(height=400, label=f"Choice {model}"),
        textbox=gr.Textbox(placeholder="Message", container=False, scale=7),
        description="",
        theme="dark",
        retry_btn=None,
        undo_btn=None,
        clear_btn=None,
    )
with gr.Blocks(
    theme=gr.themes.Soft(primary_hue="blue", neutral_hue="slate"),
    head="""
    <style>
    body {
        font-family: 'Calibri', sans-serif; /* Choose your desired font */
    }
    </style>
    """,
) as demo:
    gr.Markdown("## Turing Test Prompt Competition")

    # Two side-by-side chat panes; both are served by the same backend model.
    with gr.Row():
        with gr.Column():
            chat_a = create_chat_interface("A")
        with gr.Column():
            chat_b = create_chat_interface("B")

    # Preference-voting buttons (no click handlers wired up yet).
    # NOTE(review): labels were mojibake in the original source; restored to the
    # standard arena emojis — confirm against the deployed UI.
    with gr.Row():
        a_better = gr.Button("👈 A is better", scale=1)
        b_better = gr.Button("👉 B is better", scale=1)
        tie = gr.Button("🤝 Tie", scale=1)
        both_bad = gr.Button("👎 Both are bad", scale=1)

    prompt_input = gr.Textbox(placeholder="Message for both...", container=False)
    send_btn = gr.Button("Send to Both", variant="primary")

    def send_prompt(prompt):
        """Copy the shared prompt into both chat textboxes and clear the input."""
        # Fixed: previously returned four values with `prompt_input` listed
        # twice in `outputs`; each output component needs exactly one value.
        return prompt, prompt, ""

    # Same wiring for the button click and the textbox's Enter submit.
    shared_outputs = [chat_a.textbox, chat_b.textbox, prompt_input]
    send_btn.click(send_prompt, inputs=[prompt_input], outputs=shared_outputs)
    prompt_input.submit(send_prompt, inputs=[prompt_input], outputs=shared_outputs)

if __name__ == "__main__":
    demo.launch(share=True)