import gradio as gr
import spaces
import torch
from transformers import AutoTokenizer, AutoModel
import plotly.graph_objects as go
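
# Compare two input strings by embedding them with Mistral-7B and plotting
# each embedding's first three dimensions as a vector from the origin in 3D.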
model_name = "mistralai/Mistral-7B-v0.1"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = None  # loaded lazily inside the first GPU call
# Set pad token to eos token if not defined
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.eos_token

@spaces.GPU
def get_embedding(text):
    global model
    if model is None:
        # Load in fp16 so the 7B model fits comfortably in GPU memory
        model = AutoModel.from_pretrained(model_name, torch_dtype=torch.float16).cuda()
        # No-op here (the pad token reuses an existing id), but keeps the
        # embedding table in sync with the tokenizer
        model.resize_token_embeddings(len(tokenizer))
    inputs = tokenizer(text, return_tensors="pt", padding=True, truncation=True, max_length=512).to('cuda')
    with torch.no_grad():
        outputs = model(**inputs)
    # Mean-pool the final hidden states into a single fixed-size vector
    return outputs.last_hidden_state.mean(dim=1).squeeze().cpu().numpy()

def reduce_to_3d(embedding):
    # Crude 3D projection: keep only the first three embedding dimensions
    # (two vectors are too few samples to fit a PCA with three components)
    return embedding[:3]

@spaces.GPU
def compare_embeddings(text1, text2):
    emb1 = get_embedding(text1)
    emb2 = get_embedding(text2)
    emb1_3d = reduce_to_3d(emb1)
    emb2_3d = reduce_to_3d(emb2)
    # Draw each reduced embedding as a vector from the origin
    fig = go.Figure(data=[
        go.Scatter3d(x=[0, emb1_3d[0]], y=[0, emb1_3d[1]], z=[0, emb1_3d[2]], mode='lines+markers', name='Text 1'),
        go.Scatter3d(x=[0, emb2_3d[0]], y=[0, emb2_3d[1]], z=[0, emb2_3d[2]], mode='lines+markers', name='Text 2')
    ])
    fig.update_layout(scene=dict(xaxis_title='X', yaxis_title='Y', zaxis_title='Z'))
    return fig

iface = gr.Interface(
    fn=compare_embeddings,
    inputs=[
        gr.Textbox(label="Text 1"),
        gr.Textbox(label="Text 2")
    ],
    outputs=gr.Plot(),
    title="3D Embedding Comparison",
    description="Compare the embeddings of two strings visualized in 3D space using Mistral 7B."
)

iface.launch()
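
# On Hugging Face Spaces, the @spaces.GPU decorator requests a GPU for the
# duration of each decorated call (ZeroGPU). To run locally, use
# `python app.py` on a CUDA-capable machine, since tensors are moved to 'cuda'.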