Update README.md
README.md (changed)

```diff
@@ -29,11 +29,18 @@ paper = read_txt_file(mmd_file_path)
 idx = paper.find("## References")
 paper = paper[:idx].strip()
 
+
+model_name = "/root/sea/"
+tokenizer = AutoTokenizer.from_pretrained(model_name)
+chat_model = AutoModelForCausalLM.from_pretrained(model_name)
+chat_model.to("cuda:0")
+
 messages = [
     {"role": "system", "content": instruction},
     {"role": "user", "content": paper},
 ]
 
+
 encodes = tokenizer.apply_chat_template(messages, return_tensors="pt")
 encodes = encodes.to("cuda:0")
 len_input = encodes.shape[1]
```
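For context, here is a minimal sketch of how the snippet might continue once `len_input` is known, assuming the standard Hugging Face `generate`/`decode` API; the `max_new_tokens` value and the decoding step are illustrative and not part of the README diff.

```python
import torch

# Sketch (assumption): generate a completion from the tokenized chat prompt
# prepared above; chat_model, tokenizer, encodes, and len_input come from the snippet.
with torch.no_grad():
    generated = chat_model.generate(encodes, max_new_tokens=512)

# Drop the first len_input prompt tokens and decode only the newly generated part.
output_text = tokenizer.decode(generated[0][len_input:], skip_special_tokens=True)
print(output_text)
```

Slicing with `len_input` is why the snippet records the prompt length: `generate` returns the prompt tokens followed by the new tokens, so the offset separates the model's answer from the input.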