Update app.py
Browse files
app.py
CHANGED
@@ -5,27 +5,27 @@ from threading import Thread
|
|
5 |
import gradio as gr
|
6 |
import torch
|
7 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
8 |
|
9 |
-
# NOTE(review): in this (pre-change) revision the model/LoRA loading below was
# commented out; only `history` and `device` were live. Kept verbatim for the
# diff's "before" context.
# lora_folder = ''
# model_folder = ''
#
# config = PeftConfig.from_pretrained(("Junity/Genshin-World-Model" if lora_folder == ''
#                                      else lora_folder),
#                                     trust_remote_code=True)
# model = AutoModelForCausalLM.from_pretrained(("baichuan-inc/Baichuan-13B-Base" if model_folder == ''
#                                               else model_folder),
#                                              torch_dtype=torch.float16,
#                                              device_map="auto",
#                                              trust_remote_code=True)
# model = PeftModel.from_pretrained(model,
#                                   ("Junity/Genshin-World-Model" if lora_folder == ''
#                                    else lora_folder),
#                                   device_map="auto",
#                                   torch_dtype=torch.float16,
#                                   trust_remote_code=True)
# tokenizer = AutoTokenizer.from_pretrained(("baichuan-inc/Baichuan-13B-Base" if model_folder == ''
#                                            else model_folder),
#                                           trust_remote_code=True)

# Conversation log shared by the Gradio handlers; starts empty.
history = []
# Prefer the GPU when one is available, otherwise fall back to CPU.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
|
31 |
|
|
|
5 |
import gradio as gr
import torch

# Optional local checkpoint overrides; empty string means "pull the published
# checkpoints from the Hub".
lora_folder = ''
model_folder = ''

# Hoist the repeated "local path or published checkpoint" conditionals so each
# source is resolved exactly once (the originals were duplicated verbatim at
# every call site).
lora_source = "Junity/Genshin-World-Model" if lora_folder == '' else lora_folder
base_source = "baichuan-inc/Baichuan-13B-Base" if model_folder == '' else model_folder

# NOTE(review): PeftConfig / AutoModelForCausalLM / PeftModel / AutoTokenizer
# are assumed to be imported earlier in the file (peft / transformers) — the
# import block is outside this hunk; confirm.
config = PeftConfig.from_pretrained(lora_source, trust_remote_code=True)
# Base model is loaded in fp16 and sharded automatically across devices.
model = AutoModelForCausalLM.from_pretrained(base_source,
                                             torch_dtype=torch.float16,
                                             device_map="auto",
                                             trust_remote_code=True)
# Wrap the base model with the Genshin-World-Model LoRA adapter.
model = PeftModel.from_pretrained(model,
                                  lora_source,
                                  device_map="auto",
                                  torch_dtype=torch.float16,
                                  trust_remote_code=True)
tokenizer = AutoTokenizer.from_pretrained(base_source, trust_remote_code=True)

# Conversation log shared by the Gradio handlers; starts empty.
history = []
# Prefer the GPU when one is available, otherwise fall back to CPU.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")