Update app.py
app.py
CHANGED
@@ -22,25 +22,25 @@ if model_name == "pp3232133/pp3232133-distilgpt2-wikitext2":
     tokenizer = AutoTokenizer.from_pretrained(model_name, repo_path=hf_folder)
     model = AutoModelForCausalLM.from_pretrained(model_name, repo_path=hf_folder)
 
-    # Function handling input and output for the Gradio interface
-    def chatbot_interface(input_text):
+    # Function handling input and output for the Gradio interface
+    def chatbot_interface(input_text):
         input_ids = tokenizer.encode(input_text, return_tensors="pt")
         chatbot_output = model.generate(input_ids, max_length=100)[0]
         response = tokenizer.decode(chatbot_output, skip_special_tokens=True)
         return response
 
-    # Gradio interface for the chatbot
-    iface = gr.Interface(
-
-
-
-
-
-
-    )
-
-    # Launch the interface
-    iface.launch()
+    # Gradio interface for the chatbot
+    iface = gr.Interface(
+        fn=chatbot_interface,
+        inputs="text",
+        outputs="text",
+        title="Chatbot",
+        description="Custom chatbot based on your Hugging Face model. Start typing to chat with the bot.",
+        theme="compact"
+    )
+
+    # Launch the interface
+    iface.launch()
 
 
 else:
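For reference, below is a minimal, self-contained sketch of the chatbot app this change produces, as it could be run locally. It is an approximation under stated assumptions, not the Space's exact file: the model id is taken from the hunk header, the repo_path/hf_folder keyword seen in the diff is dropped (repo_path is not a documented from_pretrained parameter), and theme="compact" is omitted because newer Gradio releases expect theme objects rather than that string.

# Minimal sketch of the resulting chatbot app (assumptions noted above).
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

# Model id taken from the hunk header; repo_path from the diff is intentionally dropped.
model_name = "pp3232133/pp3232133-distilgpt2-wikitext2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

def chatbot_interface(input_text):
    # Encode the prompt, generate up to 100 tokens in total, decode the reply
    input_ids = tokenizer.encode(input_text, return_tensors="pt")
    chatbot_output = model.generate(input_ids, max_length=100)[0]
    return tokenizer.decode(chatbot_output, skip_special_tokens=True)

iface = gr.Interface(
    fn=chatbot_interface,
    inputs="text",
    outputs="text",
    title="Chatbot",
    description="Custom chatbot based on your Hugging Face model. Start typing to chat with the bot.",
)

if __name__ == "__main__":
    iface.launch()

Note that generate on a causal language model returns the prompt tokens followed by the continuation, so the decoded response echoes the user's input before the generated text, and max_length=100 caps prompt plus continuation at 100 tokens combined.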