import gradio as gr
import spaces
import torch
from transformers import AutoTokenizer, AutoModel
import plotly.graph_objects as go

model_name = "mistralai/Mistral-7B-v0.1"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = None  # loaded lazily inside the GPU-decorated function

# Mistral's tokenizer ships without a pad token; reuse the EOS token so that
# tokenizer(..., padding=True) works.
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.eos_token


@spaces.GPU
def get_embedding(text):
    """Mean-pool the last hidden state of Mistral 7B into a single vector."""
    global model
    if model is None:
        # Lazy load so the model is only instantiated once a GPU is attached.
        # float16 halves the memory footprint (~14 GB instead of ~28 GB).
        model = AutoModel.from_pretrained(model_name, torch_dtype=torch.float16).cuda()
        # Keep the embedding matrix in sync with the tokenizer vocabulary.
        model.resize_token_embeddings(len(tokenizer))
    inputs = tokenizer(
        text, return_tensors="pt", padding=True, truncation=True, max_length=512
    ).to("cuda")
    with torch.no_grad():
        outputs = model(**inputs)
    # Cast back to float32 so downstream consumers get a standard numpy dtype.
    return outputs.last_hidden_state.mean(dim=1).squeeze().float().cpu().numpy()


def reduce_to_3d(embedding):
    # Naive reduction: keep the first 3 of the 4096 hidden dimensions.
    # A proper projection would preserve more structure; see the sketch at
    # the bottom of this file.
    return embedding[:3]


@spaces.GPU
def compare_embeddings(*texts):
    embeddings = [get_embedding(text) for text in texts if text.strip()]
    embeddings_3d = [reduce_to_3d(emb) for emb in embeddings]

    # Draw each embedding as a vector from the origin.
    fig = go.Figure()
    for i, emb in enumerate(embeddings_3d):
        fig.add_trace(
            go.Scatter3d(
                x=[0, emb[0]],
                y=[0, emb[1]],
                z=[0, emb[2]],
                mode="lines+markers",
                name=f"Text {i+1}",
            )
        )
    fig.update_layout(scene=dict(xaxis_title="X", yaxis_title="Y", zaxis_title="Z"))
    return fig


def generate_text_boxes(n):
    # Create all 10 textboxes up front; only the first n start out visible.
    return [gr.Textbox(label=f"Text {i+1}", visible=(i < n)) for i in range(10)]


with gr.Blocks() as iface:
    gr.Markdown("# 3D Embedding Comparison")
    gr.Markdown(
        "Compare the embeddings of multiple strings visualized in 3D space "
        "using Mistral 7B."
    )

    num_texts = gr.Slider(
        minimum=2, maximum=10, step=1, value=2, label="Number of texts to compare"
    )
    with gr.Column():
        text_boxes = generate_text_boxes(2)
    output = gr.Plot()
    compare_button = gr.Button("Compare Embeddings")

    def update_interface(n):
        # Toggle visibility rather than recreating components.
        return [gr.update(visible=(i < n)) for i in range(10)]

    num_texts.change(update_interface, inputs=[num_texts], outputs=text_boxes)
    compare_button.click(compare_embeddings, inputs=text_boxes, outputs=output)

iface.launch()
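# ---------------------------------------------------------------------------
# Sketch: a more faithful 3-D reduction (not wired into the app above).
# reduce_to_3d() slices off the first 3 of 4096 hidden dimensions, which
# discards almost all of the embedding. Assuming scikit-learn and numpy are
# available, a joint PCA over the compared embeddings would preserve much
# more of the variance. Hypothetical replacement, reducing all embeddings
# at once:
#
#     import numpy as np
#     from sklearn.decomposition import PCA
#
#     def reduce_all_to_3d(embeddings):
#         # PCA needs at least 3 samples for 3 components; fall back to slicing.
#         if len(embeddings) < 3:
#             return [emb[:3] for emb in embeddings]
#         return list(PCA(n_components=3).fit_transform(np.stack(embeddings)))
#
# compare_embeddings() would then call reduce_all_to_3d(embeddings) once
# instead of mapping reduce_to_3d over each embedding separately.
# ---------------------------------------------------------------------------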