import gradio as gr
import requests
import json
import os

# Retrieve the OpenRouter API Key from the Space secrets
API_KEY = os.getenv("OpenRouter_API_KEY")

# Define available models for selection
MODEL_OPTIONS = [
    "openai/gpt-4o-mini-2024-07-18",
    "meta-llama/llama-3.1-405b-instruct",
    "nvidia/llama-3.1-nemotron-70b-instruct",
    "qwen/qwen-2.5-7b-instruct",
    "mistralai/mistral-large-2411",
    "microsoft/phi-3-medium-128k-instruct",
    "meta-llama/llama-3.1-405b-instruct:free",
    "nousresearch/hermes-3-llama-3.1-405b:free",
    "mistralai/mistral-7b-instruct:free",
    "microsoft/phi-3-medium-128k-instruct:free",
    "liquid/lfm-40b:free"
]

# History storage
history = []

def generate_model_outputs_with_history(input_text, selected_models):
    global history
    results = {}
    for model in selected_models:
        response = requests.post(
            url="https://openrouter.ai/api/v1/chat/completions",
            headers={
                "Authorization": f"Bearer {API_KEY}",
                "Content-Type": "application/json"
            },
            data=json.dumps({
                "model": model,
                "messages": [{"role": "user", "content": input_text}],
                "top_p": 1,
                "temperature": 1,
                "frequency_penalty": 0,
                "presence_penalty": 0,
                "repetition_penalty": 1,
                "top_k": 0,
            }),
            timeout=120,  # avoid hanging the request loop if a model is slow to respond
        )
        # Parse the response
        if response.status_code == 200:
            try:
                response_json = response.json()
                results[model] = (
                    response_json.get("choices", [{}])[0]
                    .get("message", {})
                    .get("content", "No content returned.")
                )
            except (ValueError, IndexError):  # malformed JSON or an empty "choices" list
                results[model] = "Error: Unable to parse response."
        else:
            results[model] = f"Error: {response.status_code}, {response.text}"
    
    # Add input and results to history
    history_entry = {
        "input": input_text,
        "selected_models": selected_models,
        "outputs": results
    }
    history.append(history_entry)
    
    # Return the per-model results (keyed by model name) and the full history
    return results, history
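
# Example call (a minimal sketch, assuming OpenRouter_API_KEY is set and the
# chosen model is available on OpenRouter); it exercises the helper without the UI:
#
#   outputs, chat_history = generate_model_outputs_with_history(
#       "Explain retrieval-augmented generation in one sentence.",
#       ["openai/gpt-4o-mini-2024-07-18"],
#   )
#   print(outputs["openai/gpt-4o-mini-2024-07-18"])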

# Gradio interface: latest per-model outputs plus a running history.
# Results are rendered as JSON keyed by model name, so output components do not
# need to be rebuilt whenever the model selection changes.
with gr.Blocks() as demo:
    with gr.Row():
        input_text = gr.Textbox(lines=2, label="Input Text", placeholder="Enter your query here")
        selected_models = gr.CheckboxGroup(choices=MODEL_OPTIONS, label="Select Models", value=[MODEL_OPTIONS[0]])

    # Latest outputs, keyed by model name
    results_output = gr.JSON(label="Model Outputs")

    # Running history of inputs, selected models, and outputs
    history_output = gr.JSON(label="History")

    # Button to generate outputs and append an entry to the history
    generate_button = gr.Button("Generate Outputs")
    generate_button.click(
        fn=generate_model_outputs_with_history,
        inputs=[input_text, selected_models],
        outputs=[results_output, history_output],
    )

demo.launch()
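
# Running outside the Space (a sketch; the file name is an assumption):
#   OpenRouter_API_KEY=sk-or-... python app.py
# Gradio serves the UI at http://127.0.0.1:7860 by default.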