Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -2,11 +2,14 @@ import streamlit as st
|
|
2 |
from langchain.text_splitter import CharacterTextSplitter
|
3 |
from langchain.docstore.document import Document
|
4 |
from langchain.chains.summarize import load_summarize_chain
|
5 |
-
from langchain_community.llms import CTransformers
|
|
|
6 |
from langchain.callbacks.manager import CallbackManager
|
7 |
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
|
8 |
from pypdf import PdfReader
|
9 |
|
|
|
|
|
10 |
# Page title
|
11 |
st.set_page_config(page_title='🦜🔗 Text Summarization App')
|
12 |
st.title('🦜🔗 Text Summarization App')
|
@@ -33,12 +36,17 @@ def load_llm():
|
|
33 |
callback_manager = CallbackManager([StreamingStdOutCallbackHandler()])
|
34 |
|
35 |
# Loading the LLM model
|
36 |
-
llm = CTransformers(
|
37 |
-
|
38 |
-
|
39 |
-
|
40 |
-
|
41 |
-
|
|
|
|
|
|
|
|
|
|
|
42 |
)
|
43 |
|
44 |
return llm
|
|
|
2 |
from langchain.text_splitter import CharacterTextSplitter
|
3 |
from langchain.docstore.document import Document
|
4 |
from langchain.chains.summarize import load_summarize_chain
|
5 |
+
# from langchain_community.llms import CTransformers
|
6 |
+
from langchain_community.llms import HuggingFaceHub
|
7 |
from langchain.callbacks.manager import CallbackManager
|
8 |
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
|
9 |
from pypdf import PdfReader
|
10 |
|
11 |
+
HUGGINGFACEHUB_API_TOKEN = st.secrets["HUGGINGFACEHUB_API_TOKEN"]
|
12 |
+
|
13 |
# Page title
|
14 |
st.set_page_config(page_title='🦜🔗 Text Summarization App')
|
15 |
st.title('🦜🔗 Text Summarization App')
|
|
|
36 |
callback_manager = CallbackManager([StreamingStdOutCallbackHandler()])
|
37 |
|
38 |
# Loading the LLM model
|
39 |
+
# llm = CTransformers(
|
40 |
+
# model="llama-2-7b-chat.ggmlv3.q2_K.bin",
|
41 |
+
# model_type="llama",
|
42 |
+
# config={'max_new_tokens': 600,
|
43 |
+
# 'temperature': 0.5,
|
44 |
+
# 'context_length': 700}
|
45 |
+
# )
|
46 |
+
|
47 |
+
llm = HuggingFaceHub(
|
48 |
+
repo_id="meta-llama/Llama-2-7b-chat-hf", # Official Llama-2 model
|
49 |
+
model_kwargs={"temperature": 0.5, "max_length": 500}
|
50 |
)
|
51 |
|
52 |
return llm
|