Update app.py
app.py CHANGED
@@ -1,11 +1,11 @@
-from langchain.
+from langchain.hub import HuggingFaceHub
 from langchain.chains import LLMChain
 from langchain.prompts import PromptTemplate
 from langchain.indexes import RagIndex
 from langchain.retrievers import HuggingFaceRetriever
 
-# Initialize the LLaMA 3.2 model from Hugging Face
-llm =
+# Initialize the LLaMA 3.2 model from Hugging Face Hub
+llm = HuggingFaceHub(repo_id="meta-llama/Llama-3.2", model_kwargs={"temperature": 0})
 
 # Create a prompt for cosine similarity
 prompt_template = (
@@ -36,8 +36,6 @@ result = llm_chain.run(phrase_1=phrase_1, phrase_2=phrase_2)
 # Print the cosine similarity score
 print(f"Cosine Similarity Score: {result}")
 
-
-
 '''
 import pandas as pd
 from langchain.chains import LLMChain, RAGChain
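For context, below is a minimal sketch of how the objects referenced by this diff (HuggingFaceHub, PromptTemplate, LLMChain, and the llm_chain.run call shown in the second hunk header) are typically wired together. It is not the elided middle of app.py: the prompt text and example phrases are hypothetical, the sketch uses the legacy langchain.llms import path for HuggingFaceHub rather than langchain.hub as in the commit, and a HUGGINGFACEHUB_API_TOKEN environment variable is assumed.

# Sketch only, not part of the commit; assumes legacy LangChain (< 0.2) and
# a valid HUGGINGFACEHUB_API_TOKEN in the environment.
from langchain.llms import HuggingFaceHub
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate

# Model id and temperature taken from the diff's added line 8.
llm = HuggingFaceHub(repo_id="meta-llama/Llama-3.2", model_kwargs={"temperature": 0})

# Hypothetical prompt text: ask the model to score the similarity of two phrases.
prompt = PromptTemplate(
    input_variables=["phrase_1", "phrase_2"],
    template=(
        "Rate the semantic similarity of the following two phrases "
        "on a scale from 0 to 1.\n"
        "Phrase 1: {phrase_1}\n"
        "Phrase 2: {phrase_2}\n"
        "Score:"
    ),
)

# Chain the prompt and the model, then run it as in the second hunk header.
llm_chain = LLMChain(llm=llm, prompt=prompt)
phrase_1 = "The cat sleeps on the sofa"   # example inputs, not from the commit
phrase_2 = "A feline rests on the couch"
result = llm_chain.run(phrase_1=phrase_1, phrase_2=phrase_2)

# Print the score, matching lines 36-37 of the diff.
print(f"Cosine Similarity Score: {result}")

Setting temperature to 0, as the commit does, keeps the returned score as deterministic as the hosted model allows, which is what you want when the output is treated as a numeric similarity value.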