# server-2 / app.py
import gradio as gr
from huggingface_hub import InferenceClient
# Default client with the first model
client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.3")
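
# Note (assumption): InferenceClient will pick up a locally saved Hugging Face token
# (or the HF_TOKEN environment variable) if one is available; gated or rate-limited
# models may require authentication.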
# Function to switch between models based on selection
def switch_client(model_name: str):
    return InferenceClient(model_name)

def respond(
    message,
    history: list[dict],
    system_message,
    max_tokens,
    temperature,
    top_p,
    model_name
):
    # Switch client based on model selection
    global client
    client = switch_client(model_name)

    # Build the conversation in the role/content format expected by chat_completion
    messages = [{"role": "system", "content": system_message}]
    for val in history:
        messages.append({"role": val['role'], "content": val['content']})
    messages.append({"role": "user", "content": message})

    # Get the response from the model
    response = client.chat_completion(
        messages,
        max_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p,
    )

    # Extract the content from the response
    final_response = response.choices[0].message['content']
    return final_response

# Model names and their pseudonyms
model_choices = [
("mistralai/Mistral-7B-Instruct-v0.3", "Lake 1 Base")
]
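
# Additional (model_id, pseudonym) pairs could be appended to model_choices above
# to expose more models in the dropdown; each entry maps a Hub model id to a display name.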
# Extract the pseudonyms (display names) for the dropdown
pseudonyms = [model[1] for model in model_choices]

# Function to handle model selection via pseudonyms
def respond_with_pseudonym(
    message,
    history: list[dict],
    system_message,
    max_tokens,
    temperature,
    top_p,
    selected_pseudonym
):
    # Find the actual model name from the pseudonym
    model_name = next(model[0] for model in model_choices if model[1] == selected_pseudonym)

    # Call the existing respond function
    response = respond(message, history, system_message, max_tokens, temperature, top_p, model_name)

    # No longer adding the pseudonym at the end of the response
    return response

# Gradio Chat Interface
demo = gr.ChatInterface(
    respond_with_pseudonym,
    type="messages",  # pass history as role/content dicts, matching respond() (requires a recent Gradio)
    additional_inputs=[
        gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
        gr.Dropdown(pseudonyms, label="Select Model", value=pseudonyms[0]),  # Pseudonym selection dropdown
    ],
)

if __name__ == "__main__":
    demo.launch()