import gradio as gr
from sentence_transformers import SentenceTransformer, util
import openai
import os

os.environ["TOKENIZERS_PARALLELISM"] = "false"

# Initialize paths and model identifiers for easy configuration and maintenance
filename = "output_topic_details.txt"  # Path to the file storing music-specific details
retrieval_model_name = 'output/sentence-transformer-finetuned/'

openai.api_key = os.environ["OPENAI_API_KEY"]

system_message = "Your name is Fillira, and you are an energetic, casual, and friendly chatbot that invents creative music playlist titles based on the genres and artists of the songs included. The genre is the overall style and theme of the music, and the artists are the creators of songs based on that genre. Please do not mix them up."

# Initial system message to set the behavior of the assistant
messages = [{"role": "system", "content": system_message}]
# Attempt to load the retrieval model and report success or failure
try:
    retrieval_model = SentenceTransformer(retrieval_model_name)
    print("Retrieval model loaded successfully.")
except Exception as e:
    print(f"Failed to load retrieval model: {e}")
def load_and_preprocess_text(filename):
    """
    Load and preprocess text from a file, removing empty lines and stripping whitespace.
    """
    try:
        with open(filename, 'r', encoding='utf-8') as file:
            segments = [line.strip() for line in file if line.strip()]
        print("Text loaded and preprocessed successfully.")
        return segments
    except Exception as e:
        print(f"Failed to load or preprocess text: {e}")
        return []


segments = load_and_preprocess_text(filename)
def find_relevant_segment(user_query, segments):
    """
    Find the most relevant text segment for a user's query using cosine similarity
    between sentence embeddings. The best match is chosen based on the content of the query.
    """
    try:
        # Lowercase the query for better matching
        lower_query = user_query.lower()

        # Encode the query and the segments
        query_embedding = retrieval_model.encode(lower_query)
        segment_embeddings = retrieval_model.encode(segments)

        # Compute cosine similarities between the query and the segments
        similarities = util.pytorch_cos_sim(query_embedding, segment_embeddings)[0]

        # Return the segment with the highest similarity score
        best_idx = similarities.argmax()
        return segments[best_idx]
    except Exception as e:
        print(f"Error in finding relevant segment: {e}")
        return ""
def generate_response(user_query, relevant_segment):
    """
    Generate a response emphasizing the bot's friendliness and its ability to suggest playlist titles.
    """
    try:
        # Combine the user's question with the retrieved context so the model sees both
        user_message = f"{user_query} Here's some relevant info from my playlist database: {relevant_segment}"

        # Append the user's message to the conversation history
        messages.append({"role": "user", "content": user_message})

        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=messages,
            max_tokens=150,
            temperature=0.2,
            top_p=1,
            frequency_penalty=0,
            presence_penalty=0
        )

        # Extract the response text
        output_text = response['choices'][0]['message']['content'].strip()

        # Append the assistant's reply to the conversation history for context
        messages.append({"role": "assistant", "content": output_text})
        return output_text
    except Exception as e:
        print(f"Error in generating response: {e}")
        return f"Error in generating response: {e}"
def query_model(question):
    """
    Process a question: find the relevant segment, then generate a response.
    """
    if not question.strip():
        return "Hello, I'm Fillira!"
    relevant_segment = find_relevant_segment(question, segments)
    if not relevant_segment:
        return "Hmmm, that doesn't seem to be in my database. I'm sorry!"
    return generate_response(question, relevant_segment)
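
# Example usage outside Gradio (illustrative; the query string is made up and the call
# would hit the OpenAI API, so it is left commented out):
# print(query_model("I need a title for an upbeat indie rock playlist"))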
# Define the welcome message and a short description of what the chatbot can do
welcome_message = """
# Fillira
### Your music enthusiast from the future!
"""

topics = """
Hey, it's Fillira! I'm here to help you create an amazing playlist title! Just tell me what genre or artist is in the playlist, and I'll do what I do best!
"""
# Set up the Gradio Blocks interface with custom layout components
with gr.Blocks(theme='shivi/calm_seafoam') as demo:
    gr.Markdown(welcome_message)  # Display the formatted welcome message
    with gr.Row():
        with gr.Column():
            gr.Markdown(topics)  # Show the intro text on the left side
    with gr.Row():
        with gr.Column():
            question = gr.Textbox(label="Need anything?", placeholder="Talk to me...")
            answer = gr.Textbox(label="Fillira", placeholder="", interactive=False, lines=10)
            submit_button = gr.Button("Shoot!")
            submit_button.click(fn=query_model, inputs=question, outputs=answer)

# Launch the Gradio app to allow user interaction
demo.launch(share=True)