Kirill Gelvan committed · 7f3bae1
Parent(s): 6966380

new names and samples

app.py CHANGED

@@ -3,23 +3,22 @@ import gradio as gr
 from transformers import AutoModelForCausalLM, AutoTokenizer
 from util_funcs import get_length_param
 
-def chat_function(message, length_of_the_answer, who_is_next, creativity): # model, tokenizer
+def chat_function(Message, Length_of_the_answer, Who_is_next, Base_to_On_subject): # model, tokenizer
 
-    input_user = message
+    input_user = Message
 
-    if length_of_the_answer == 'short':
+    if Length_of_the_answer == 'short':
         next_len = '1'
-    elif length_of_the_answer == 'medium':
+    elif Length_of_the_answer == 'medium':
         next_len = '2'
-    elif length_of_the_answer == 'long':
+    elif Length_of_the_answer == 'long':
         next_len = '3'
     else:
         next_len = '-'
 
-
-    if who_is_next == 'Kirill':
+    if Who_is_next == 'Kirill':
         next_who = 'G'
-    elif who_is_next == 'Me':
+    elif Who_is_next == 'Me':
         next_who = 'H'
 
 
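next_len ('1'/'2'/'3'/'-') and next_who ('G' for the bot, 'H' for the human) are control codes for the fine-tuned DialoGPT prompt. The diff does not show how app.py folds them into the model input, so the following sketch is an assumption modeled on the linked ruDialoGpt3-finetune-colab repo, where every turn is prefixed with |speaker|length|:

# Hypothetical helper, not part of this commit: build the per-turn prefix
# the fine-tuned model was trained on. speaker_flag '1' = bot ('G'),
# '0' = human ('H'); next_len is the length bucket. The exact format is assumed.
def build_prefix(next_who: str, next_len: str) -> str:
    speaker_flag = '1' if next_who == 'G' else '0'
    return f"|{speaker_flag}|{next_len}|"

print(build_prefix('G', '2'))  # -> |1|2|
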
@@ -44,7 +43,7 @@ def chat_function(message, length_of_the_answer, who_is_next, creativity): # model, tokenizer
     # append the new user input tokens to the chat history
     chat_history_ids = torch.cat([chat_history_ids, new_user_input_ids], dim=-1)
 
-    print(tokenizer.decode(chat_history_ids[-1])) # uncomment to see full gpt input
+    # print(tokenizer.decode(chat_history_ids[-1])) # uncomment to see full gpt input
 
     # save previous len
     input_len = chat_history_ids.shape[-1]
@@ -57,7 +56,7 @@ def chat_function(message, length_of_the_answer, who_is_next, creativity): # model, tokenizer
         do_sample=True,
         top_k=50,
         top_p=0.9,
-        temperature = float(creativity), # 0 for greedy
+        temperature = float(Base_to_On_subject), # 0 for greedy
         mask_token_id=tokenizer.mask_token_id,
         eos_token_id=tokenizer.eos_token_id,
         unk_token_id=tokenizer.unk_token_id,
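Base_to_On_subject (the renamed creativity) feeds straight into the sampling temperature, while top_k and top_p stay fixed. Below is a self-contained sketch of an equivalent generate() call; the checkpoint name, prompt, and max_length are assumptions rather than values taken from this commit:

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Assumed checkpoint; the diff does not show which model the Space loads.
tokenizer = AutoTokenizer.from_pretrained("Kirili4ik/ruDialoGpt3-medium-finetuned-telegram")
model = AutoModelForCausalLM.from_pretrained("Kirili4ik/ruDialoGpt3-medium-finetuned-telegram").eval()

input_ids = tokenizer.encode("Привет!" + tokenizer.eos_token, return_tensors="pt")
with torch.no_grad():
    output_ids = model.generate(
        input_ids,
        do_sample=True,      # sample instead of greedy decoding
        top_k=50,            # consider only the 50 most likely next tokens
        top_p=0.9,           # nucleus sampling: smallest set covering 90% probability
        temperature=0.85,    # what the UI passes in as float(Base_to_On_subject)
        max_length=128,      # assumed cap, not shown in the diff
        pad_token_id=tokenizer.eos_token_id,
    )
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
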
@@ -94,11 +93,12 @@ model = model.eval()
 # Gradio
 checkbox_group = gr.inputs.CheckboxGroup(['Kirill', 'Me'], default=['Kirill'], type="value", label=None)
 title = "Chat with Kirill (in Russian)"
-description = "Тут можно поболтать со мной. Но вместо меня бот. Оставь…
-article = "<p style='text-align: center'><a href='https://github.com/Kirili4ik/ruDialoGpt3-finetune-colab'>Github with fine-tuning GPT-…
+description = "Тут можно поболтать со мной. Но вместо меня бот. \n Оставь сообщение пустым, чтобы Кирилл продолжил говорить - он очень любит писать подряд несколько сообщений в чате. Подбробнее о технике по ссылке внизу."
+article = "<p style='text-align: center'><a href='https://github.com/Kirili4ik/ruDialoGpt3-finetune-colab'>Github with fine-tuning GPT-3 on your chat</a></p>"
 examples = [
-    ["…
-    ["…
+    ["В чем смысл жизни?", 'medium', 'Kirill', 0.95],
+    ["Когда у тебя ближайший собес?", 'medium', 'Kirill', 0.85],
+    ["Сколько тебе лет, Кирилл?", 'medium', 'Kirill', 0.85]
 ]
 
 iface = gr.Interface(chat_function,
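For reference, the new description says, in English: "Here you can chat with me. But instead of me it's a bot. Leave the message empty to let Kirill keep talking, he really likes sending several messages in a row in chat. More about the technique at the link below." The three examples translate to "What is the meaning of life?", "When is your next interview?", and "How old are you, Kirill?".

The renames are user-facing: when no explicit label is given, Gradio derives an input's label from the function's parameter names, which is presumably why message/length_of_the_answer/who_is_next/creativity became Message/Length_of_the_answer/Who_is_next/Base_to_On_subject, and why each examples row lists values in that parameter order. The diff ends mid-call, so the wiring below is only a guess that reuses the classic gr.inputs API already present in the file:

import gradio as gr

def chat_function(Message, Length_of_the_answer, Who_is_next, Base_to_On_subject):
    # stand-in body: the real app generates the reply with the model
    return f"({Who_is_next}, {Length_of_the_answer}, t={Base_to_On_subject}) {Message}"

iface = gr.Interface(
    chat_function,
    inputs=[
        gr.inputs.Textbox(lines=2),                                      # labeled "Message"
        gr.inputs.Radio(['short', 'medium', 'long'], default='medium'),  # "Length_of_the_answer"
        gr.inputs.Radio(['Kirill', 'Me'], default='Kirill'),             # "Who_is_next"
        gr.inputs.Slider(0.1, 1.0, default=0.85),                        # "Base_to_On_subject"
    ],
    outputs="text",
    title="Chat with Kirill (in Russian)")
iface.launch()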