import gradio as gr
import pandas as pd
from transformers import pipeline
from load_models import models_and_tokenizers, models_checkpoints
import spaces
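
# NOTE (assumption): `load_models` is this project's own helper module. Based on how it
# is used below, `models_and_tokenizers` is expected to map a checkpoint name to a
# (model, tokenizer) pair, and `models_checkpoints` to list the checkpoint names
# offered in the dropdowns.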

choice = {"ModelA": "", "ModelB": ""}

# Build the initial leaderboard table so the Leaderboard tab has content on startup.
dff = pd.read_csv("models.csv")
dff.to_html("tab.html")
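# NOTE (assumption): "models.csv" is expected to contain at least the columns used below
# (MODEL, MATCHES_PLAYED, MATCHES_WON, WINNING_RATE), e.g. a hypothetical row:
#   MODEL,MATCHES_PLAYED,MATCHES_WON,WINNING_RATE
#   some-org/some-model,10,6,0.6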

def refreshfn() -> gr.HTML:
    # Rebuild the leaderboard HTML from the latest CSV and return it to the UI.
    df = pd.read_csv("models.csv")
    df.to_html("tab.html")
    with open("tab.html") as f:
        content = f.read()
    return gr.HTML(content)

def rewrite_csv_ordered_by_winning_rate(csv_path):
    # Read the CSV and sort it by WINNING_RATE in descending order
    df = pd.read_csv(csv_path)
    df_sorted = df.sort_values(by="WINNING_RATE", ascending=False)

    # Write the sorted DataFrame back to the same CSV file (overwrites it in place)
    df_sorted.to_csv(csv_path, index=False)

@spaces.GPU(duration=200)
def run_inference(pipe, prompt):
    # Run generation on the GPU-allocated worker and return only the generated text.
    # (Parameter named `pipe` so it does not shadow the imported `transformers.pipeline`.)
    response = pipe(prompt)
    bot_message = response[0]["generated_text"]
    return bot_message

def modelA_button():
    # Record a win for Model A and recompute its winning rate.
    global choice
    df = pd.read_csv("models.csv")
    mask = df["MODEL"] == choice["ModelA"]
    df.loc[mask, "MATCHES_WON"] += 1
    df.loc[mask, "WINNING_RATE"] = df.loc[mask, "MATCHES_WON"] / df.loc[mask, "MATCHES_PLAYED"]
    df.to_csv("models.csv", index=False)
    rewrite_csv_ordered_by_winning_rate("models.csv")

def modelB_button():
    # Record a win for Model B and recompute its winning rate.
    global choice
    df = pd.read_csv("models.csv")
    mask = df["MODEL"] == choice["ModelB"]
    df.loc[mask, "MATCHES_WON"] += 1
    df.loc[mask, "WINNING_RATE"] = df.loc[mask, "MATCHES_WON"] / df.loc[mask, "MATCHES_PLAYED"]
    df.to_csv("models.csv", index=False)
    rewrite_csv_ordered_by_winning_rate("models.csv")
      

def reply(modelA, modelB, prompt):
    # Remember which models are battling so the vote buttons can credit the winner.
    global choice
    choice["ModelA"] = modelA
    choice["ModelB"] = modelB
    # Count one played match for each contender.
    df = pd.read_csv("models.csv")
    df.loc[df["MODEL"] == modelA, "MATCHES_PLAYED"] += 1
    df.loc[df["MODEL"] == modelB, "MATCHES_PLAYED"] += 1
    df.to_csv("models.csv", index=False)
    # Build one text-generation pipeline per contender and generate both answers.
    pipeA = pipeline("text-generation", model=models_and_tokenizers[modelA][0], tokenizer=models_and_tokenizers[modelA][1],
                     max_new_tokens=512, repetition_penalty=1.5, temperature=0.5, device="cuda")
    pipeB = pipeline("text-generation", model=models_and_tokenizers[modelB][0], tokenizer=models_and_tokenizers[modelB][1],
                     max_new_tokens=512, repetition_penalty=1.5, temperature=0.5, device="cuda")
    responseA = run_inference(pipeA, prompt)
    responseB = run_inference(pipeB, prompt)
    return responseA, responseB

modelA_dropdown = gr.Dropdown(models_checkpoints, label="Model A", info="Choose the first model for the battle!")
modelB_dropdown = gr.Dropdown(models_checkpoints, label="Model B", info="Choose the second model for the battle!")
prompt_textbox = gr.Textbox(label="Prompt", value="Is pineapple pizza sacrilegious?")

demo0 = gr.Interface(
    fn=reply,
    inputs=[modelA_dropdown, modelB_dropdown, prompt_textbox],
    outputs=[gr.Markdown(label="Model A response"), gr.Markdown(label="Model B response")],
)

with gr.Blocks() as demo1:
    # Embed the arena interface and place the two voting buttons below it.
    demo0.render()
    btnA = gr.Button("Vote for Model A!")
    btnB = gr.Button("Vote for Model B!")
    btnA.click(modelA_button, inputs=None, outputs=None)
    btnB.click(modelB_button, inputs=None, outputs=None)

with gr.Blocks() as demo2:
    # Leaderboard tab: show the pre-rendered HTML table plus a button to regenerate it.
    with open("tab.html") as f:
        content = f.read()
    t = gr.HTML(content)
    btn = gr.Button("Refresh")
    btn.click(fn=refreshfn, inputs=None, outputs=t)

demo = gr.TabbedInterface([demo1, demo2], ["Chat Arena", "Leaderboard"])

if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", server_port=7860)