import os

import gradio as gr
import spaces
import torch
from transformers import AutoTokenizer, AutoModel
import plotly.graph_objects as go

# Optional Hugging Face access token (required for gated models such as the Mistral checkpoints).
TOKEN = os.getenv("HF_TOKEN")

default_model_name = "mistralai/Mistral-7B-Instruct-v0.1"

# Lazily loaded tokenizer/model, cached between calls.
tokenizer = None
model = None


@spaces.GPU(duration=300)
def get_embedding(text, model_repo):
    """Return a mean-pooled last-hidden-state embedding for `text`."""
    global tokenizer, model

    # (Re)load only when the requested repository differs from the cached one.
    if tokenizer is None or model is None or model.name_or_path != model_repo:
        try:
            tokenizer = AutoTokenizer.from_pretrained(model_repo, token=TOKEN)
            model = AutoModel.from_pretrained(
                model_repo, torch_dtype=torch.float16, token=TOKEN
            ).cuda()
            # Set pad token to eos token if not defined.
            if tokenizer.pad_token is None:
                tokenizer.pad_token = tokenizer.eos_token
                model.resize_token_embeddings(len(tokenizer))
        except Exception as e:
            return f"Error loading model: {str(e)}"

    inputs = tokenizer(
        text, return_tensors="pt", padding=True, truncation=True, max_length=512
    ).to("cuda")
    with torch.no_grad():
        outputs = model(**inputs)
    # Mean-pool the token embeddings into a single vector.
    return outputs.last_hidden_state.mean(dim=1).squeeze().cpu().numpy()


def reduce_to_3d(embedding):
    # Crude reduction: keep only the first three dimensions for plotting.
    return embedding[:3]


@spaces.GPU
def compare_embeddings(model_repo, *texts):
    if not model_repo:
        model_repo = default_model_name

    embeddings = []
    for text in texts:
        if text.strip():
            emb = get_embedding(text, model_repo)
            if isinstance(emb, str):
                # Error message from model loading; surface it to the UI
                # instead of returning a string to the Plot output.
                raise gr.Error(emb)
            embeddings.append(emb)

    embeddings_3d = [reduce_to_3d(emb) for emb in embeddings]

    # Draw each embedding as a vector from the origin.
    fig = go.Figure()
    for i, emb in enumerate(embeddings_3d):
        fig.add_trace(
            go.Scatter3d(
                x=[0, emb[0]],
                y=[0, emb[1]],
                z=[0, emb[2]],
                mode="lines+markers",
                name=f"Text {i+1}",
            )
        )
    fig.update_layout(scene=dict(xaxis_title="X", yaxis_title="Y", zaxis_title="Z"))
    return fig


def generate_text_boxes(n):
    # Create all 10 text boxes up front; only the first `n` are visible.
    return [gr.Textbox(label=f"Text {i+1}", visible=(i < n)) for i in range(10)]


with gr.Blocks() as iface:
    gr.Markdown("# 3D Embedding Comparison")
    gr.Markdown(
        "Compare the embeddings of multiple strings visualized in 3D space using a custom model."
    )

    model_repo_input = gr.Textbox(
        label="Model Repository",
        value=default_model_name,
        placeholder="Enter the model repository (e.g., mistralai/Mistral-7B-Instruct-v0.3)",
    )
    num_texts = gr.Slider(
        minimum=2, maximum=10, step=1, value=2, label="Number of texts to compare"
    )

    with gr.Column() as input_column:
        text_boxes = generate_text_boxes(2)

    output = gr.Plot()
    compare_button = gr.Button("Compare Embeddings")

    def update_interface(n):
        # Toggle visibility so only the first `n` text boxes are shown.
        return [gr.update(visible=(i < n)) for i in range(10)]

    num_texts.change(
        update_interface,
        inputs=[num_texts],
        outputs=text_boxes,
    )

    compare_button.click(
        compare_embeddings,
        inputs=[model_repo_input] + text_boxes,
        outputs=output,
    )

iface.launch()