Spaces: Runtime error
fix
transformerschat.py  CHANGED  (+3 -3)
@@ -18,7 +18,7 @@ def load():
 model, tokenizer = load()
 
 
-def ask(model, system_prompt, pre_prompt, question):
+def ask(_, system_prompt, pre_prompt, question):
     messages = [
         {
             'role': 'system',
@@ -29,9 +29,9 @@ def ask(model, system_prompt, pre_prompt, question):
             'content': f"{question}",
         },
     ]
-    logger.debug(f"<< {
+    logger.debug(f"<< {openhermes} << {question}")
     inputs = tokenizer(question, return_tensors="pt", return_attention_mask=False)
     outputs = model.generate(**inputs, max_length=200)
     answer = tokenizer.batch_decode(outputs)[0]
-    logger.debug(f">> {
+    logger.debug(f">> {openhermes} >> {answer}")
     return answer
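
For context, a minimal self-contained sketch of the generation path that ask() uses (lines 33-35 of this file). The checkpoint name below is an illustrative placeholder, not the model this Space's load() actually returns, and logger/openhermes from the diff are omitted since they are defined elsewhere in transformerschat.py.

# Sketch of the tokenize -> generate -> decode steps mirrored from ask().
from transformers import AutoModelForCausalLM, AutoTokenizer

checkpoint = "gpt2"  # placeholder; the Space's load() chooses its own checkpoint
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForCausalLM.from_pretrained(checkpoint)

question = "What is a tokenizer?"
inputs = tokenizer(question, return_tensors="pt", return_attention_mask=False)
outputs = model.generate(**inputs, max_length=200)
answer = tokenizer.batch_decode(outputs)[0]
print(answer)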