# 3dembed / app.py
import gradio as gr
import spaces
import torch
from transformers import AutoTokenizer, AutoModel
import plotly.graph_objects as go

model_name = "mistralai/Mistral-7B-v0.1"
tokenizer = AutoTokenizer.from_pretrained(model_name)
# The 7B model is loaded lazily inside the GPU-decorated function below.
model = None

# Set pad token to eos token if not defined (Mistral's tokenizer ships without one)
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.eos_token

@spaces.GPU
def get_embedding(text):
    global model
    if model is None:
        model = AutoModel.from_pretrained(model_name).cuda()
        # No new tokens were added (pad_token reuses eos_token), so this is
        # a no-op kept as a safeguard.
        model.resize_token_embeddings(len(tokenizer))
    inputs = tokenizer(text, return_tensors="pt", padding=True, truncation=True, max_length=512).to('cuda')
    with torch.no_grad():
        outputs = model(**inputs)
    # Mean-pool the last hidden states over the sequence into one vector.
    return outputs.last_hidden_state.mean(dim=1).squeeze().cpu().numpy()
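
# Note: the mean above also averages over padding positions. That is a no-op
# for a single input, but a mask-aware mean is safer if batching is ever
# added. A minimal sketch; masked_mean_pool is a hypothetical helper the app
# does not currently call:
def masked_mean_pool(last_hidden_state, attention_mask):
    # Zero out padded positions, then divide by the count of real tokens.
    mask = attention_mask.unsqueeze(-1).type_as(last_hidden_state)
    return (last_hidden_state * mask).sum(dim=1) / mask.sum(dim=1).clamp(min=1)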

def reduce_to_3d(embedding):
    # Crude reduction: keep only the first three embedding dimensions.
    return embedding[:3]
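
# Note: slicing off the first three dimensions discards nearly all of the
# embedding's structure. A fixed random projection (Johnson-Lindenstrauss
# style) tends to preserve relative distances better. A minimal sketch,
# assuming numpy; reduce_to_3d_random is a hypothetical alternative the app
# does not currently call:
def reduce_to_3d_random(embedding, seed=0):
    import numpy as np
    # Seed the generator so both texts are projected through the same matrix.
    rng = np.random.default_rng(seed)
    proj = rng.standard_normal((embedding.shape[0], 3))
    return embedding @ proj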

@spaces.GPU
def compare_embeddings(text1, text2):
    emb1 = get_embedding(text1)
    emb2 = get_embedding(text2)
    emb1_3d = reduce_to_3d(emb1)
    emb2_3d = reduce_to_3d(emb2)
    # Plot each reduced embedding as a vector from the origin.
    fig = go.Figure(data=[
        go.Scatter3d(x=[0, emb1_3d[0]], y=[0, emb1_3d[1]], z=[0, emb1_3d[2]], mode='lines+markers', name='Text 1'),
        go.Scatter3d(x=[0, emb2_3d[0]], y=[0, emb2_3d[1]], z=[0, emb2_3d[2]], mode='lines+markers', name='Text 2')
    ])
    fig.update_layout(scene=dict(xaxis_title='X', yaxis_title='Y', zaxis_title='Z'))
    return fig
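
# Note: the 3-D plot is qualitative only. Annotating the figure with the
# full-dimensional cosine similarity would add a quantitative comparison.
# A minimal sketch, assuming numpy; cosine_similarity is a hypothetical
# helper, not wired into compare_embeddings above:
def cosine_similarity(a, b):
    import numpy as np
    # The small epsilon guards against division by zero for a zero vector.
    return float(np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b) + 1e-8))
# Usage inside compare_embeddings could look like:
#   fig.update_layout(title=f"Cosine similarity: {cosine_similarity(emb1, emb2):.3f}")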

iface = gr.Interface(
    fn=compare_embeddings,
    inputs=[
        gr.Textbox(label="Text 1"),
        gr.Textbox(label="Text 2")
    ],
    outputs=gr.Plot(),
    title="3D Embedding Comparison",
    description="Compare the embeddings of two strings visualized in 3D space using Mistral 7B."
)

iface.launch()