Update app.py
app.py CHANGED
@@ -229,10 +229,11 @@ model_name = 'facebook/blenderbot-400M-distill'
 tokenizer = BlenderbotTokenizer.from_pretrained(model_name)
 model = BlenderbotForConditionalGeneration.from_pretrained(model_name)

-def func (message
+def func (message):
     inputs = tokenizer(message, return_tensors="pt")
     result = model.generate(**inputs)
-
+    print(result)
+    return tokenizer.decode(result[0])



@@ -273,7 +274,7 @@ def callChains(current_message):
     topic_sale_inform_result = topic_sale_inform(current_message)
     #conversation.append_response("The Big lebowski.")
     #conversation.add_user_input("Is it good?")
-    final_answer =
+    final_answer = func(current_message)
     return final_answer, sentiment_analysis_result, topic_sale_inform_result

 chat_bot = gr.Interface(fn=callChains , inputs="textbox", outputs=["textbox","textbox","textbox"], title="Conversation Bot with extra")
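
For context, below is a minimal sketch of how the two edited pieces fit together after this commit. The func body, the final_answer wiring, and the three-textbox Gradio interface come straight from the diff above, and the model/tokenizer setup comes from the first hunk's header; the imports, the sentiment_analysis and topic_sale_inform stubs (the real helpers live elsewhere in app.py), and the launch() call are assumptions added only so the snippet runs on its own.

    # Sketch of app.py's flow after this commit (assumed surrounding context).
    import gradio as gr
    from transformers import BlenderbotTokenizer, BlenderbotForConditionalGeneration

    model_name = 'facebook/blenderbot-400M-distill'
    tokenizer = BlenderbotTokenizer.from_pretrained(model_name)
    model = BlenderbotForConditionalGeneration.from_pretrained(model_name)

    def func(message):
        # Tokenize the user message, generate a Blenderbot reply,
        # and decode the first generated sequence back to text.
        inputs = tokenizer(message, return_tensors="pt")
        result = model.generate(**inputs)
        print(result)  # raw generated token IDs, as added in this commit
        return tokenizer.decode(result[0])

    # Hypothetical stand-ins for the helper chains defined elsewhere in app.py.
    def sentiment_analysis(message):
        return "neutral"

    def topic_sale_inform(message):
        return "no sale topic detected"

    def callChains(current_message):
        sentiment_analysis_result = sentiment_analysis(current_message)
        topic_sale_inform_result = topic_sale_inform(current_message)
        # The commit wires the Blenderbot reply in as the first output.
        final_answer = func(current_message)
        return final_answer, sentiment_analysis_result, topic_sale_inform_result

    # The three return values map onto the three "textbox" outputs.
    chat_bot = gr.Interface(fn=callChains, inputs="textbox",
                            outputs=["textbox", "textbox", "textbox"],
                            title="Conversation Bot with extra")
    chat_bot.launch()

With this in place, each message typed into the input textbox is routed through callChains, and Gradio shows the Blenderbot reply alongside the two auxiliary results in the three output textboxes.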