import os

import gradio as gr
from gradio_client import Client

# Access the Hugging Face token from environment variables
hf_token = os.getenv("HF_TOKEN")

# Check if the token is set (optional for debugging)
if hf_token is None:
    print("Hugging Face token not found. Please set the HF_TOKEN environment variable.")
else:
    print("Hugging Face token successfully retrieved.")
# Initialize the Client for the Hugging Face Space with the token
client = Client("segestic/token-cost-calculator", hf_token=hf_token)

# Function to interact with the Hugging Face Space API
def interact_with_space(system_prompt, user_prompt, assistant_response, model="gpt-4o"):
    # Call the API with the provided inputs
    result = client.predict(
        system_prompt=system_prompt,
        user_prompt=user_prompt,
        assistant_response=assistant_response,
        model=model,
        hf_token=hf_token,
        api_name="/predict",
    )
    return result
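
# Example of calling the helper directly (hypothetical values, assuming the
# Space's /predict endpoint returns a cost breakdown string):
#   result = interact_with_space(
#       "You are a helpful assistant.",
#       "Explain token-based pricing in one sentence.",
#       "Token-based pricing charges per input and output token.",
#       model="gpt-4o",
#   )
#   print(result)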

# Define the Gradio interface for the UI
def create_ui():
    # Create the input and output components for the UI
    with gr.Blocks() as demo:
        # Inputs
        system_prompt_input = gr.Textbox(label="System Prompt", placeholder="Enter system prompt here", lines=2)
        user_prompt_input = gr.Textbox(label="User Prompt", placeholder="Enter user prompt here", lines=2)
        assistant_response_input = gr.Textbox(label="Assistant Response", placeholder="Enter assistant response here", lines=2)
        model_input = gr.Textbox(label="Model", value="gpt-4o")  # Default model is gpt-4o

        # Output
        output = gr.Textbox(label="Response", interactive=False)

        # Button to send inputs to the model and get a response
        submit_btn = gr.Button("Get Response")

        # When the button is clicked, interact with the Space and display the result
        submit_btn.click(
            interact_with_space,
            inputs=[system_prompt_input, user_prompt_input, assistant_response_input, model_input],
            outputs=[output],
        )

    return demo

# Launch the Gradio UI
ui = create_ui()
ui.launch()
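
# If running this script outside a Space (e.g. locally), launch() also accepts
# explicit server settings; the values below are only an illustration:
#   ui.launch(server_name="0.0.0.0", server_port=7860)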