import gradio as gr
import requests
import json
import os
import datetime
# Retrieve the OpenRouter API key from the Space secrets
# (the name must match the secret configured in the Space settings)
API_KEY = os.getenv("OPENROUTER_API_KEY")
# Define available models for selection
MODEL_OPTIONS = [
    "openai/gpt-4o-mini-2024-07-18",
    "meta-llama/llama-3.1-405b-instruct",
    "nvidia/llama-3.1-nemotron-70b-instruct",
    "qwen/qwen-2.5-7b-instruct",
    "mistralai/mistral-large-2411",
    "microsoft/phi-3-medium-128k-instruct",
    "meta-llama/llama-3.1-405b-instruct:free",
    "nousresearch/hermes-3-llama-3.1-405b:free",
    "mistralai/mistral-7b-instruct:free",
    "microsoft/phi-3-medium-128k-instruct:free",
    "liquid/lfm-40b:free",
]
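# Note: the identifiers above follow OpenRouter's "provider/model-name" scheme;
# the ":free" suffix selects the free variant of the corresponding model.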
# History storage
history = []
def generate_model_outputs_with_history(input_text, selected_models):
    """Query each selected model via the OpenRouter chat completions API and log the run."""
    global history
    results = {}
    for model in selected_models:
        response = requests.post(
            url="https://openrouter.ai/api/v1/chat/completions",
            headers={
                "Authorization": f"Bearer {API_KEY}",
                "Content-Type": "application/json"
            },
            data=json.dumps({
                "model": model,
                "messages": [{"role": "user", "content": input_text}],
                "top_p": 1,
                "temperature": 1,
                "frequency_penalty": 0,
                "presence_penalty": 0,
                "repetition_penalty": 1,
                "top_k": 0,
            }),
            timeout=120,  # Avoid hanging the Space on a stalled request
        )
        # Parse the response
        if response.status_code == 200:
            try:
                response_json = response.json()
                results[model] = response_json.get("choices", [{}])[0].get("message", {}).get("content", "No content returned.")
            except json.JSONDecodeError:
                results[model] = "Error: Unable to parse response."
        else:
            results[model] = f"Error: {response.status_code}, {response.text}"

    # Add the input and results to the history
    history_entry = {
        "input": input_text,
        "selected_models": selected_models,
        "outputs": results,
        "timestamp": str(datetime.datetime.now())
    }
    history.append(history_entry)
    return results  # Results keyed by model identifier
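# Example (hypothetical, for local testing outside the Space): query a single
# model and print its reply. Requires the API key environment variable to be set.
#
#   outputs = generate_model_outputs_with_history("Say hello.", [MODEL_OPTIONS[0]])
#   print(outputs[MODEL_OPTIONS[0]])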
# Build one read-only output textbox per model in the given list
def create_outputs(selected_models):
    return [
        gr.Textbox(
            label=f"Output from {model}",
            interactive=False,
            lines=5,                         # Default visible height
            max_lines=10,                    # Max lines before scrolling
            visible=False,                   # Revealed once the model has produced output
            elem_id=f"output_{model}",       # Unique ID for styling
            elem_classes=["output-window"],  # Matches the scrollable-output CSS passed to gr.Blocks
        )
        for model in selected_models
    ]
def clear_history():
    global history
    history = []
    return "History Cleared!", []
def export_history():
    global history
    # Save the history to a timestamped JSON file in the working directory
    file_name = f"history_{datetime.datetime.now().strftime('%Y%m%d_%H%M%S')}.json"
    with open(file_name, 'w') as f:
        json.dump(history, f, indent=4)
    return f"History exported to {file_name}"
# Gradio interface with per-model outputs and history controls
with gr.Blocks(css=".output-window { overflow-y: auto; max-height: 200px; }") as demo:
    with gr.Row():
        input_text = gr.Textbox(lines=2, label="Input Text", placeholder="Enter your query here")
        selected_models = gr.CheckboxGroup(choices=MODEL_OPTIONS, label="Select Models", value=[MODEL_OPTIONS[0]])

    # One output textbox per available model; only the boxes for models that
    # were actually queried are made visible after generation
    output_boxes = create_outputs(MODEL_OPTIONS)

    history_placeholder = gr.State(history)  # Maintain history state
    status_box = gr.Textbox(label="Status", interactive=False)

    # Button to generate outputs and record the run in the history
    generate_button = gr.Button("Generate Outputs")

    def generate_and_update(input_text, selected_models):
        results = generate_model_outputs_with_history(input_text, selected_models)
        # Show and fill the boxes for the selected models, hide the rest
        updates = [
            gr.update(value=results[model], visible=True) if model in results
            else gr.update(visible=False)
            for model in MODEL_OPTIONS
        ]
        return updates + [history]

    generate_button.click(
        fn=generate_and_update,
        inputs=[input_text, selected_models],
        outputs=output_boxes + [history_placeholder]
    )

    # Clear History button
    clear_history_button = gr.Button("Clear History")
    clear_history_button.click(fn=clear_history, outputs=[status_box, history_placeholder])

    # Export History button
    export_history_button = gr.Button("Export History")
    export_history_button.click(fn=export_history, outputs=[status_box])

demo.launch()