import gradio as gr
import requests
import json
import os

# Retrieve the OpenRouter API Key from the Space secrets
API_KEY = os.getenv("OpenRouter_API_KEY")
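
# Optional guard (an addition, not part of the original app): warn early if the
# secret is missing, so failed API calls are easier to diagnose.
if not API_KEY:
    print("Warning: the OpenRouter_API_KEY secret is not set; all requests will fail.")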

# Define available models for selection
MODEL_OPTIONS = [
    "openai/gpt-4o-mini-2024-07-18",
    "meta-llama/llama-3.1-405b-instruct",
    "nvidia/llama-3.1-nemotron-70b-instruct",
    "qwen/qwen-2.5-7b-instruct",
    "mistralai/mistral-large-2411",
    "microsoft/phi-3-medium-128k-instruct",
    "meta-llama/llama-3.1-405b-instruct:free",
    "nousresearch/hermes-3-llama-3.1-405b:free",
    "mistralai/mistral-7b-instruct:free",
    "microsoft/phi-3-medium-128k-instruct:free",
    "liquid/lfm-40b:free"
]
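
# The list above is hard-coded. As a sketch (not used by this app), the catalogue
# could instead be fetched from OpenRouter's public models endpoint:
#
#   resp = requests.get("https://openrouter.ai/api/v1/models", timeout=30)
#   MODEL_OPTIONS = [m["id"] for m in resp.json()["data"]]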

# History storage (module-level, so it is shared across every visitor's session)
history = []

def generate_comparisons_with_history(input_text, selected_models, history_state):
    """Query each selected model via OpenRouter and append the results to the shared history."""
    global history
    results = {}
    for model in selected_models:
        response = requests.post(
            url="https://openrouter.ai/api/v1/chat/completions",
            headers={
                "Authorization": f"Bearer {API_KEY}",
                "Content-Type": "application/json"
            },
            data=json.dumps({
                "model": model,  # Use the current model
                "messages": [{"role": "user", "content": input_text}],
                "top_p": 1,
                "temperature": 1,
                "frequency_penalty": 0,
                "presence_penalty": 0,
                "repetition_penalty": 1,
                "top_k": 0,
            }),
            timeout=120,  # avoid hanging indefinitely on a slow response
        )
        
        # Parse the response
        if response.status_code == 200:
            try:
                response_json = response.json()
                results[model] = response_json.get("choices", [{}])[0].get("message", {}).get("content", "No content returned.")
            except json.JSONDecodeError:
                results[model] = "Error: Unable to parse response."
        else:
            results[model] = f"Error: {response.status_code}, {response.text}"
    
    # Add the input and results to the shared history
    history_entry = {
        "input": input_text,
        "selected_models": selected_models,
        "outputs": results
    }
    history.append(history_entry)

    # Return one value per model tab (empty for models that were not selected),
    # followed by the updated history, so the return matches the click handler's outputs
    per_model_outputs = [results.get(model, "") for model in MODEL_OPTIONS]
    return (*per_model_outputs, history)

def clear_history():
    """Reset the shared history and clear the history panel."""
    global history
    history = []
    return history
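
# Illustrative only (not wired into the UI): the comparison function can also be
# exercised without launching Gradio, e.g. from a Python shell:
#
#   values = generate_comparisons_with_history("Ping", [MODEL_OPTIONS[0]], None)
#   # `values` holds one entry per model tab, followed by the history list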

# Create Gradio interface with multiple model selection and history
with gr.Blocks() as demo:
    input_text = gr.Textbox(lines=2, label="Input Text", placeholder="Enter your query here")
    selected_models = gr.CheckboxGroup(choices=MODEL_OPTIONS, label="Select Models", value=[MODEL_OPTIONS[0]])
    
    # Define the output components: one tab with a gr.JSON view per model
    with gr.Tabs(elem_id="output-comparisons"):
        output_comparisons = {}
        for model in MODEL_OPTIONS:
            with gr.Tab(model):
                output_comparisons[model] = gr.JSON(label=model)
    
    output_history = gr.JSON(label="History", elem_id="output-history")
    clear_history_button = gr.Button("Clear History")

    # Clear the stored history when the button is clicked
    clear_history_button.click(clear_history, outputs=output_history)
    
    # Define the button to trigger generating comparisons
    generate_button = gr.Button("Generate Comparisons")
    
    # Run the comparisons and update every model tab plus the history panel
    generate_button.click(
        generate_comparisons_with_history,
        inputs=[input_text, selected_models, gr.State()],
        outputs=[*output_comparisons.values(), output_history],
    )

    # Insert custom CSS using gr.HTML()
    gr.HTML("""
        <style>
            #output-comparisons {
                height: 300px;
                overflow: auto;
                border: 1px solid #ddd;
                padding: 10px;
            }
            #output-history {
                height: 300px;
                overflow: auto;
                border: 1px solid #ddd;
                padding: 10px;
            }
        </style>
    """)

demo.launch()