Spaces:
Runtime error
import gradio as gr
from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
from huggingface_hub import login
# Authenticate with the Hugging Face Hub (replace the placeholder with your own access token)
token = 'hf_your_actual_token_here'
login(token=token)
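# On Spaces, a safer pattern is to store the token as a repository secret instead of
# hardcoding it in the source. A minimal sketch, assuming a secret named HF_TOKEN is
# exposed to the app as an environment variable:
#
#     import os
#     login(token=os.environ["HF_TOKEN"])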
# Initialize the text generation pipeline
tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.3")
model = AutoModelForCausalLM.from_pretrained("mistralai/Mistral-7B-Instruct-v0.3")
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
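# Loading a 7B model in full float32 precision needs roughly 28 GB of RAM, which can
# itself cause a runtime error on smaller Space hardware. A minimal sketch of a lighter
# load, assuming torch and the accelerate package are available in the Space:
#
#     import torch
#     model = AutoModelForCausalLM.from_pretrained(
#         "mistralai/Mistral-7B-Instruct-v0.3",
#         torch_dtype=torch.float16,   # half precision roughly halves the memory footprint
#         device_map="auto",           # requires accelerate
#     )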
# Define the function to handle chat
def chat(message):
    # Generate the response using the model
    response = pipe(message, max_length=50)
    # Extract and return the generated text
    return response[0]['generated_text']
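# Note: the text-generation pipeline returns the prompt plus the completion in
# 'generated_text'. To return only the newly generated tokens, the pipeline call
# accepts return_full_text=False, e.g.:
#
#     response = pipe(message, max_new_tokens=50, return_full_text=False)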
# Create the Gradio interface
# (the gr.inputs namespace was removed in Gradio 4.x; use gr.Textbox directly)
interface = gr.Interface(
    fn=chat,
    inputs=gr.Textbox(label="Enter your message"),
    outputs="text",
    title="Text Generation Bot",
    description="Chat with the Mistral-7B-Instruct model to get responses to your queries."
)
# Launch the Gradio interface
interface.launch()