Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -15,6 +15,7 @@ from tqdm.auto import tqdm
|
|
15 |
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
16 |
|
17 |
|
|
|
18 |
|
19 |
|
20 |
|
@@ -56,20 +57,19 @@ st.set_page_config(
|
|
56 |
|
57 |
|
58 |
|
59 |
-
|
60 |
@st.cache_resource
|
61 |
def load_llm_model():
|
62 |
-
# llm =
|
63 |
# task= 'text2text-generation',
|
64 |
# model_kwargs={ "device_map": "auto",
|
65 |
# "load_in_8bit": True,"max_length": 256, "temperature": 0,
|
66 |
# "repetition_penalty": 1.5})
|
67 |
|
68 |
|
69 |
-
llm =
|
70 |
task= 'text2text-generation',
|
71 |
|
72 |
-
model_kwargs={ "max_length":
|
73 |
"torch_dtype":torch.float32,
|
74 |
"repetition_penalty": 1.3})
|
75 |
return llm
|
|
|
15 |
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
16 |
|
17 |
|
18 |
+
# NOTE(review): `LlamaForConditionalGeneration` does not exist in transformers;
# Llama is a decoder-only causal LM. The old import raised ImportError at
# startup, which is the Space's "Runtime error".
# Was: from transformers import LlamaForConditionalGeneration
from transformers import LlamaForCausalLM
|
19 |
|
20 |
|
21 |
|
|
|
57 |
|
58 |
|
59 |
|
|
|
@st.cache_resource
def load_llm_model():
    """Load and cache the fine-tuned Llama-2 legal model.

    Returns:
        A ``LlamaForCausalLM`` instance loaded from the Hugging Face Hub
        checkpoint ``PyaeSoneK/LlamaV2LegalFineTuned`` in float32.

    ``@st.cache_resource`` makes Streamlit load the (large) model once per
    server process instead of on every script rerun.
    """
    # Local import so this fix is self-contained: transformers has no
    # `LlamaForConditionalGeneration` — Llama is a decoder-only causal LM,
    # so the correct class is `LlamaForCausalLM`. The old name raised
    # ImportError at startup (the Space's "Runtime error").
    from transformers import LlamaForCausalLM

    # Previous (commented-out) attempt, kept for reference — 8-bit with
    # automatic device placement:
    # llm = LlamaForCausalLM.from_pretrained(
    #     'PyaeSoneK/LlamaV2LegalFineTuned',
    #     device_map="auto", load_in_8bit=True)

    # Bugs fixed versus the committed call:
    #  * `from_pretrained` has no `model_id` keyword — the checkpoint name is
    #    the first positional argument (`pretrained_model_name_or_path`), so
    #    the original call raised a TypeError.
    #  * `task=` and `model_kwargs=` belong to `transformers.pipeline` /
    #    LangChain's `HuggingFacePipeline`, not to `from_pretrained`.
    #  * `torch_dtype` IS a valid `from_pretrained` kwarg and is passed
    #    directly.
    llm = LlamaForCausalLM.from_pretrained(
        'PyaeSoneK/LlamaV2LegalFineTuned',
        torch_dtype=torch.float32,
    )

    # Generation settings are generate-time options, so they live on the
    # model's generation_config rather than being load-time kwargs.
    # NOTE(review): temperature=0 implies greedy decoding; confirm whether
    # sampling was ever intended.
    llm.generation_config.max_length = 128
    llm.generation_config.temperature = 0.0
    llm.generation_config.repetition_penalty = 1.3
    return llm
|