Commit 7433ede
Parent(s): cc5beb0
Update README.md
README.md CHANGED
@@ -77,16 +77,23 @@ import sys
 model_path = "./" # You can modify the path for storing the local model
 model = AutoModelForCausalLM.from_pretrained(model_path)
 tokenizer = AutoTokenizer.from_pretrained(model_path)
+history = "" # save dialog history
+max_history_length = 1024
+
 print("Human:")
 line = input()
 while line:
-
-
+    history += 'Human: ' + line.strip() + '\n\nAssistant:'
+    if len(history) > max_history_length:
+        history = history[-1024:]
+    input_ids = tokenizer(history, return_tensors="pt").input_ids
     outputs = model.generate(input_ids, max_new_tokens=200, do_sample = True, top_k = 30, top_p = 0.85, temperature = 0.35, repetition_penalty=1.2)
     rets = tokenizer.batch_decode(outputs, skip_special_tokens=True)
-    print("Assistant:\n" + rets[0].strip().replace(
+    print("Assistant:\n" + rets[0].strip().replace(history, ""))
+    history += "\n" + rets[0].strip().replace(history, "")
     print("\n------------------------------------------------\nHuman:")
     line = input()
+
 ```

 ## Limitations
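For context, here is a minimal, self-contained sketch of the multi-turn loop as it reads after this change. The hunk only covers README lines 77–99, so the import line and anything above `model_path` are assumptions (the hunk header merely hints at an earlier `import sys`); treat this as an illustration of the history-tracking pattern, not the exact README contents.

```python
# Sketch of the updated chat loop; imports and surrounding lines outside the
# hunk are assumed, everything inside the loop mirrors the new diff lines.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_path = "./"  # path where the local model is stored (as in the README snippet)
model = AutoModelForCausalLM.from_pretrained(model_path)
tokenizer = AutoTokenizer.from_pretrained(model_path)

history = ""               # accumulated dialog history
max_history_length = 1024  # cap on history length, in characters

print("Human:")
line = input()
while line:
    # Append the user turn and prompt the model for the assistant turn.
    history += 'Human: ' + line.strip() + '\n\nAssistant:'
    if len(history) > max_history_length:
        history = history[-max_history_length:]
    input_ids = tokenizer(history, return_tensors="pt").input_ids
    outputs = model.generate(input_ids, max_new_tokens=200, do_sample=True,
                             top_k=30, top_p=0.85, temperature=0.35,
                             repetition_penalty=1.2)
    rets = tokenizer.batch_decode(outputs, skip_special_tokens=True)
    # generate() returns the prompt plus the completion for decoder-only models,
    # so strip the prompt to keep only the new reply.
    reply = rets[0].strip().replace(history, "")
    print("Assistant:\n" + reply)
    history += "\n" + reply
    print("\n------------------------------------------------\nHuman:")
    line = input()
```

Note that the history cap is measured in characters (`len(history)`), not tokens, and that the reply is recovered with `.replace(history, "")` precisely because the decoded output echoes the prompt.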