Update app.py
app.py CHANGED

@@ -2,7 +2,8 @@
 import torch
 import pandas as pd
 import streamlit as st
-from langchain.llms import
+from langchain.llms import HuggingFacePipeline
+from transformers import pipeline
 from huggingface_hub import login
 
 # Your Hugging Face secret token
@@ -11,7 +12,9 @@ login(huggingface_token)
 
 # Load the Llama 3.1 model
 model_name = "meta-llama/llama-3.1-8b-instruct"  # Make sure this is the correct model name
-
+# Use a transformers pipeline to load the model
+llm_pipeline = pipeline("text-generation", model=model_name, device=0 if torch.cuda.is_available() else -1)
+llm = HuggingFacePipeline(pipeline=llm_pipeline)
 
 # Streamlit interface
 st.title("Cosine Similarity con Llama 3.1")
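For context, a minimal sketch of how the updated app.py could fit together after this commit. It assumes the token behind login(huggingface_token) is read from an HF_TOKEN environment variable (that part is not shown in the diff) and that the pinned LangChain version still exposes HuggingFacePipeline under langchain.llms:

import os

import torch
import pandas as pd
import streamlit as st
from langchain.llms import HuggingFacePipeline
from transformers import pipeline
from huggingface_hub import login

# Your Hugging Face secret token (assumed here to come from an environment variable)
huggingface_token = os.environ["HF_TOKEN"]
login(huggingface_token)

# Load the Llama 3.1 model through a transformers text-generation pipeline,
# on GPU if one is available, otherwise on CPU
model_name = "meta-llama/llama-3.1-8b-instruct"
llm_pipeline = pipeline(
    "text-generation",
    model=model_name,
    device=0 if torch.cuda.is_available() else -1,
)
llm = HuggingFacePipeline(pipeline=llm_pipeline)

# Streamlit interface
st.title("Cosine Similarity con Llama 3.1")

Note that in more recent LangChain releases this class has moved out of langchain.llms, so depending on the version pinned in requirements.txt the import may instead need to be from langchain_community.llms import HuggingFacePipeline or from langchain_huggingface import HuggingFacePipeline.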