Update pdfchatbot.py
pdfchatbot.py CHANGED (+3 -3)

@@ -12,7 +12,7 @@ from langchain.prompts import PromptTemplate
 from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

 class PDFChatBot:
-    def __init__(self, config_path="
+    def __init__(self, config_path="./config.yaml"):
         """
         Initialize the PDFChatBot instance.

@@ -103,9 +103,9 @@ class PDFChatBot:
         self.model = AutoModelForCausalLM.from_pretrained(
             self.config.get("autoModelForCausalLM"),
             device_map='auto',
-            torch_dtype=torch.
+            torch_dtype=torch.bfloat16,
             token=True,
-            load_in_4bit=
+            load_in_4bit=False
         )

     def create_pipeline(self):
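For context, here is a minimal sketch of how the model-loading code reads after this change, assembled from the lines visible in the diff. The "./config.yaml" path and the "autoModelForCausalLM" key come from the diff itself; the YAML loading and tokenizer setup are illustrative assumptions, not the repository's actual surrounding code.

import torch
import yaml
from transformers import AutoModelForCausalLM, AutoTokenizer

# Assumed helper code: read the model id from the config file at the new default path.
with open("./config.yaml") as f:
    config = yaml.safe_load(f)

model = AutoModelForCausalLM.from_pretrained(
    config.get("autoModelForCausalLM"),
    device_map="auto",           # let accelerate place layers across available devices
    torch_dtype=torch.bfloat16,  # load weights in bfloat16 instead of the float32 default
    token=True,                  # authenticate with the locally saved Hugging Face token
    load_in_4bit=False,          # disable bitsandbytes 4-bit quantization
)
tokenizer = AutoTokenizer.from_pretrained(config.get("autoModelForCausalLM"))

With load_in_4bit=False the weights stay in bfloat16, trading higher memory use for skipping the bitsandbytes quantization path; newer transformers releases prefer passing a BitsAndBytesConfig via quantization_config rather than the bare load_in_4bit flag.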