# --- Hugging Face file-viewer residue (not part of the program) ---
# Spaces:
# Running
# Running
# File size: 12,402 Bytes
# 5669147 e6fcdff 5669147 |
# (line-number gutter from the file viewer omitted)
import gradio as gr
from typing import Tuple, List
import requests
# Base URL of the arena backend service.
# NOTE(review): hard-coded IP — consider moving to an env var / config file.
url = "http://138.4.22.130/arena"
def submit_prompt(prompt: str):
    # NOTE(review): `backend` is not defined or imported anywhere in this file,
    # so calling this raises NameError. It looks like dead code left over from
    # a previous in-process backend; confirm whether it should be removed or
    # rewritten to use the HTTP API (see send_prompt).
    return backend.router(prompt)
def start_app() -> Tuple[bool, bool, bool]:
    """Switch from the landing page to the main arena view.

    Returns Gradio visibility updates for, in order:
    the landing column (hidden), the app column (shown),
    and the start button (hidden).
    """
    print("Starting app")
    # landing, app, start_button — only the app column becomes visible.
    visibility = (False, True, False)
    return tuple(gr.update(visible=shown) for shown in visibility)
def change_vote(backdown: bool, vote_id: str) -> Tuple[dict]:
    """Tell the backend whether the user retracts their vote after seeing
    the energy-consumption warning, then hide the backdown row.

    Args:
        backdown: True if the user backs down from (changes) their vote.
        vote_id: Backend identifier of the vote being amended.
            (Renamed from ``id`` to avoid shadowing the builtin.)

    Returns:
        One-element tuple with a Gradio update hiding the backdown row.
    """
    print(vote_id)
    # Best-effort notification; the timeout keeps the UI from hanging forever
    # if the backend is unreachable. The response body is intentionally unused.
    requests.post(
        url + "/v2/backdownvote",
        json={"backdown": backdown, "_id": vote_id},
        timeout=10,
    )
    return (gr.update(visible=False),)
def record_vote(prompt: str, left_chat: List, right_chat: List,
                left_model: str, right_model: str, moreConsuming: str,
                vote_type: int) -> Tuple[str, dict, dict, dict, dict, dict]:
    """Record a vote for the left model, the right model, or a tie.

    Args:
        prompt: The user prompt both models answered.
        left_chat: Chat history shown in the left column.
        right_chat: Chat history shown in the right column.
        left_model: Name of the left model.
        right_model: Name of the right model.
        moreConsuming: Which side consumes more energy ("left", "right", or "").
        vote_type: 0 = left wins, 1 = right wins, anything else = tie.

    Returns:
        (result message, three updates disabling the vote buttons, an update
        showing the model-names row, an update toggling the backdown row).

    Fixes vs. original: the last two parameters are swapped so they match the
    positional order used by every click handler (the handlers list the
    moreConsuming state as the last input and append the vote literal after
    it); this also repairs the ``moreConsumingn`` typo that made the body read
    the module-level gr.State object instead of the argument. The win messages
    now match the buttons (buttons[0] = "Left is better" sends 0), and the
    backdown row uses the computed flag instead of always being shown.
    """
    if vote_type == 0:
        vote_message = "Left model wins!"
    elif vote_type == 1:
        vote_message = "Right model wins!"
    else:
        vote_message = "Is a tie!"
    result_msg = f"Vote recorded: {vote_message}"
    # Best-effort report to the backend; timeout keeps the UI responsive.
    requests.post(url + "/v2/vote", json={"vote": vote_type, "prompt": prompt,
                                          "left_chat": left_chat, "right_chat": right_chat,
                                          "left_model": left_model, "right_model": right_model,
                                          },
                  timeout=10)
    # Ask the user to reconsider only when the chosen winner is the more
    # energy-hungry model (originally computed but never used).
    changeVisible = ((moreConsuming == "left" and vote_type == 0)
                     or (moreConsuming == "right" and vote_type == 1))
    # result, left_vote_btn, right_vote_btn, tie_btn, model_names_row, backdown_row
    return (
        result_msg,
        gr.update(interactive=False),
        gr.update(interactive=False),
        gr.update(interactive=False),
        gr.update(visible=True),
        gr.update(visible=changeVisible),
    )
def send_prompt(prompt: str, moreConsuming) -> Tuple[List, List, str, str, bool, bool, bool, bool]:
    """Query the backend arena for two anonymous model answers to *prompt*.

    Returns, in output order: the two single-message chat histories, the two
    model names, updates enabling the three vote buttons, and an update hiding
    the model-names row until the user has voted.

    NOTE(review): the assignment to ``moreConsuming`` below only rebinds the
    local name — the gr.State component is never updated because this
    function's outputs do not include it; confirm the intended wiring.
    NOTE(review): a non-200 status still falls through to the indexing below,
    which may raise KeyError — confirm the backend always returns "answers".
    """
    # Generous timeout: two LLM completions can be slow, but never hang forever.
    response = requests.post(url + "/v2/query", json={"prompt": prompt}, timeout=120)
    jsonResponse = response.json()
    print(jsonResponse)
    if jsonResponse["status"] == 200 and jsonResponse["message"]:
        moreConsuming = jsonResponse["message"]["moreConsumption"]
    return (
        [{"role": "assistant", "content": jsonResponse["answers"][0]}],  # left_output
        [{"role": "assistant", "content": jsonResponse["answers"][1]}],  # right_output
        jsonResponse["models"][0],  # left_model
        jsonResponse["models"][1],  # right_model
        gr.update(interactive=True, visible=True),  # left vote button
        gr.update(interactive=True, visible=True),  # right vote button
        gr.update(interactive=True, visible=True),  # tie button
        gr.update(visible=False),                   # hide model names until voted
    )
# Initialize Gradio Blocks
with gr.Blocks(css="footer{display:none !important}") as demo:
    # Session state: backend id of the last recorded vote, and which side
    # consumes more energy ("left"/"right"/"").
    _id = gr.State("")
    moreConsuming = gr.State("")

    # ---------- Landing page ----------
    with gr.Column(visible=True) as landing:
        gr.set_static_paths(paths=["static"])
        with gr.Group():
            gr.HTML("""
            <div style="padding: 20px; font-size: 18px;">
                <h2 style="font-size: 30px;">π± About This Project</h2>
                <p>This space is part of the project <strong>"Sostenibilidad Generativa"</strong> π, funded by the <strong>COTEC Foundation</strong>. Our goal is to evaluate how <strong>energy awareness</strong> β‘ impacts users' evaluation of <strong>Large Language Models (LLMs)</strong>.</p>
                <h2 style="font-size: 30px;">π How It Works</h2>
                <ol>
                    <li><strong>Ask a Question</strong> π¬: Enter any question in the prompt box below.</li>
                    <li><strong>Compare Responses</strong> π€βοΈ: Two different LLMs will provide answers.</li>
                    <li><strong>Make Your Choice</strong> β
: Rate which response you think is better.</li>
                    <li><strong>Consider Energy Impact</strong> β‘π: For some questions, you'll see information about the models' energy consumption.</li>
                </ol>
                <h2 style="font-size: 30px;">β‘ Energy Information</h2>
                <ul>
                    <li>When shown, <strong>energy consumption data</strong> π will help you understand the <strong>environmental impact</strong> π.</li>
                    <li>You'll need to consider: <strong>Is a better response worth the additional energy consumption?</strong> π€</li>
                    <li>The comparison will highlight when one model <strong>consumes more than twice</strong> the energy of the other β οΈ.</li>
                </ul>
                <p style="text-align: center; margin-top: 20px; font-size: 35px;">
                    πΏ <strong>Let's make AI more sustainable together!</strong> πβ»οΈ
                </p>
            </div>
            """)

    # ---------- Main arena view (hidden until Start is pressed) ----------
    with gr.Column(visible=False) as app:
        gr.set_static_paths(paths=["static"])
        buttons = [None] * 2  # per-side vote buttons, filled in the loop below
        with gr.Group():
            gr.Image("static/logo.png", elem_id="centered", show_label=False)
            with gr.Row(visible=False) as model_consumption_row:
                consumption_text = gr.Textbox(label="Consumo: ", visible=True, interactive=False)
            with gr.Row():
                chatbot = [None] * 2  # left/right answer panes
                messages = ["π Left is better", "π Right is better"]
                for i in range(2):
                    with gr.Column():
                        chatbot[i] = gr.Chatbot(
                            show_label=False,  # You can set this to False to hide the label
                            type="messages",
                            elem_id="chatbot",
                            height=650,
                            show_copy_button=True,
                            latex_delimiters=[
                                {"left": "$", "right": "$", "display": False},
                                {"left": "$$", "right": "$$", "display": True},
                                {"left": r"\(", "right": r"\)", "display": False},
                                {"left": r"\[", "right": r"\]", "display": True},
                            ],
                        )
                        buttons[i] = gr.Button(
                            value=messages[i], visible=True, interactive=False
                        )
            with gr.Row():
                for i in range(2):
                    with gr.Column():
                        gr.Textbox(show_label=False, visible=False)
            tievote_btn = gr.Button(
                value="π€ It's a Tie!", visible=True, interactive=False
            )
            with gr.Column(visible=False) as backdown_row:
                backdown_txt = gr.HTML("""<h2>Do you want to change your vote, knowing that the selected model consumes significantly more?</h2>""")
                with gr.Row():
                    no_backdown_btn = gr.Button(value="No", visible=True, interactive=True)
                    backdown_btn = gr.Button(value="Yes", visible=True, interactive=True)
            with gr.Row(visible=False) as model_names_row:
                left_model = gr.Textbox(label="Left Model", interactive=False)
                right_model = gr.Textbox(label="Right Model", interactive=False)
            result = gr.Textbox(label="Result", interactive=False, visible=False)
        with gr.Group():
            with gr.Row():
                textbox = gr.Textbox(
                    show_label=False,
                    placeholder="π Enter your prompt and press ENTER",
                    elem_id="input_box",
                )
        previous_prompt = gr.State("")
        tie_count = gr.State(0)

    # ---------- Interactions ----------
    # Fix: list the moreConsuming state as an input so its *value* is passed
    # (the original closed over the State component object itself).
    # NOTE(review): send_prompt does not output the state, so moreConsuming is
    # never actually updated — confirm intended wiring.
    textbox.submit(fn=send_prompt,
                   inputs=[textbox, moreConsuming],
                   outputs=[chatbot[0], chatbot[1], left_model, right_model,
                            buttons[0], buttons[1], tievote_btn, model_names_row,
                            ])
    buttons[0].click(
        lambda *args: record_vote(*args, 0),
        inputs=[textbox, chatbot[0], chatbot[1], left_model, right_model, moreConsuming],
        outputs=[result, buttons[0], buttons[1], tievote_btn, model_names_row, backdown_row]
    )
    buttons[1].click(
        lambda *args: record_vote(*args, 1),
        inputs=[textbox, chatbot[0], chatbot[1], left_model, right_model, moreConsuming],
        outputs=[result, buttons[0], buttons[1], tievote_btn, model_names_row, backdown_row]
    )
    tievote_btn.click(
        lambda *args: record_vote(*args, 2),
        inputs=[textbox, chatbot[0], chatbot[1], left_model, right_model, gr.State(value=False)],
        # Fix: record_vote returns six updates; backdown_row was missing here,
        # which made every tie vote fail with an output-count mismatch.
        outputs=[result, buttons[0], buttons[1], tievote_btn, model_names_row, backdown_row]
    )
    # Fix: feed the vote id's *value* via inputs and pass arguments in the
    # (backdown, id) order change_vote expects (the original passed the State
    # component and the boolean in swapped positions).
    # NOTE(review): _id is never populated from the /v2/vote response anywhere
    # in this file, so the backend receives an empty id — confirm.
    backdown_btn.click(
        lambda current_id: change_vote(True, current_id),
        inputs=[_id],
        outputs=[backdown_row]
    )
    no_backdown_btn.click(
        lambda current_id: change_vote(False, current_id),
        inputs=[_id],
        outputs=[backdown_row]
    )

    # ---------- Project description footer (always visible) ----------
    gr.HTML("""
    <div style="padding: 20px; font-size: 18px;">
        <h2 style="font-size: 30px;">π± About This Project</h2>
        <p>This space is part of the project <strong>"Sostenibilidad Generativa"</strong> π, funded by the <strong>COTEC Foundation</strong>. Our goal is to evaluate how <strong>energy awareness</strong> β‘ impacts users' evaluation of <strong>Large Language Models (LLMs)</strong>.</p>
        <h2 style="font-size: 30px;">π How It Works</h2>
        <ol>
            <li><strong>Ask a Question</strong> π¬: Enter any question in the prompt box below.</li>
            <li><strong>Compare Responses</strong> π€βοΈ: Two different LLMs will provide answers.</li>
            <li><strong>Make Your Choice</strong> β
: Rate which response you think is better.</li>
            <li><strong>Consider Energy Impact</strong> β‘π: For some questions, you'll see information about the models' energy consumption.</li>
        </ol>
        <h2 style="font-size: 30px;">β‘ Energy Information</h2>
        <ul>
            <li>When shown, <strong>energy consumption data</strong> π will help you understand the <strong>environmental impact</strong> π.</li>
            <li>You'll need to consider: <strong>Is a better response worth the additional energy consumption?</strong> π€</li>
            <li>The comparison will highlight when one model <strong>consumes more than twice</strong> the energy of the other β οΈ.</li>
        </ul>
        <p style="text-align: center; margin-top: 20px; font-size: 35px;">
            πΏ <strong>Let's make AI more sustainable together!</strong> πβ»οΈ
        </p>
    </div>
    """)
    gr.Markdown("""This space is part of a research project to study how knowledge of energy consumption influences user preferences in AI systems. It must be used only for that purpose and not for any illegal, harmful or offensive activities. Please do not upload personal or private information. The space collects and stores the questions and answers and reserves the right to distribute it under a Creative Commons Attribution (CC-BY) license."""
    )
    start_button = gr.Button(value="Start", visible=True, interactive=True, size="lg", variant="primary")
    start_button.click(
        lambda: start_app(),
        inputs=[],
        outputs=[landing, app, start_button]
    )
# Launch the UI only when executed directly (not when imported).
# allowed_paths exposes the static assets (logo) to the Gradio file server.
if __name__ == "__main__":
    demo.launch(allowed_paths=["static"], show_api=False, share=False)
# | (trailing file-viewer residue)