Spaces:
Runtime error
import gradio as gr
from llama_cpp import Llama
from huggingface_hub import hf_hub_download

# Fetch the model file from the Hugging Face Hub
model_repo = "SupermanRX/moderateTherapistModel"  # Replace with your repo name
model_file = "model.gguf"  # Replace with your GGUF model file name in the repo

# Download the model file to the local environment
model_path = hf_hub_download(repo_id=model_repo, filename=model_file)

# Load the GGUF model
llm = Llama(model_path=model_path)

# Define the chatbot function
def chatbot(input_text):
    output = llm(prompt=input_text, max_tokens=200)
    return output['choices'][0]['text']
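# For reference, llama-cpp-python returns an OpenAI-style completion dict, which is
# why the generated text is read from choices[0]['text']. A rough sketch of the
# shape (the field values shown here are illustrative, not real output):
#
#   {
#       "choices": [{"text": " ...generated reply...", "index": 0, "finish_reason": "length"}],
#       "usage": {"prompt_tokens": 12, "completion_tokens": 200, "total_tokens": 212},
#   }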
# Create the Gradio interface
with gr.Blocks() as demo:
    chatbot_ui = gr.Chatbot()
    textbox = gr.Textbox(label="Type your message here:")
    submit = gr.Button("Send")

    # Handle user interaction: generate a reply and append it to the chat history
    def user_interaction(input_text, chat_history):
        chat_history = chat_history or []  # the Chatbot value can be None on the first turn
        response = chatbot(input_text)
        chat_history.append((input_text, response))
        return chat_history, ""

    submit.click(user_interaction, [textbox, chatbot_ui], [chatbot_ui, textbox])

demo.launch()
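Note: Spaces installs Python dependencies from a requirements.txt in the repo root, and a missing package is one common cause of a runtime error. A minimal sketch covering the imports used above (unpinned versions are an assumption):

gradio
llama-cpp-python
huggingface_hub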