Mikhail Nikolaev
committed on
Commit
·
6cbf875
1
Parent(s):
6d1c11e
Add application file
Browse files
app.py
CHANGED
@@ -2,7 +2,10 @@ import gradio as gr
|
|
2 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
3 |
import torch
|
4 |
|
5 |
-
|
|
|
|
|
|
|
6 |
tokenizer = AutoTokenizer.from_pretrained(model_id)
|
7 |
model = AutoModelForCausalLM.from_pretrained(
|
8 |
model_id,
|
@@ -12,7 +15,7 @@ model = AutoModelForCausalLM.from_pretrained(
|
|
12 |
|
13 |
def generate_response(prompt):
|
14 |
messages = [
|
15 |
-
{"role": "system", "content": "Ты T-
|
16 |
{"role": "user", "content": prompt}
|
17 |
]
|
18 |
|
@@ -40,7 +43,7 @@ interface = gr.Interface(
|
|
40 |
fn=generate_response,
|
41 |
inputs="text",
|
42 |
outputs="text",
|
43 |
-
title="T-
|
44 |
)
|
45 |
|
46 |
interface.launch()
|
|
|
2 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
3 |
import torch
|
4 |
|
5 |
+
# Установка сида для воспроизводимости
|
6 |
+
torch.manual_seed(42)
|
7 |
+
|
8 |
+
model_id = "t-tech/T-lite-it-1.0"
|
9 |
tokenizer = AutoTokenizer.from_pretrained(model_id)
|
10 |
model = AutoModelForCausalLM.from_pretrained(
|
11 |
model_id,
|
|
|
15 |
|
16 |
def generate_response(prompt):
|
17 |
messages = [
|
18 |
+
{"role": "system", "content": "Ты T-lite, виртуальный ассистент в Т-Технологии. Твоя задача - быть полезным диалоговым ассистентом."},
|
19 |
{"role": "user", "content": prompt}
|
20 |
]
|
21 |
|
|
|
43 |
fn=generate_response,
|
44 |
inputs="text",
|
45 |
outputs="text",
|
46 |
+
title="T-lite API"
|
47 |
)
|
48 |
|
49 |
interface.launch()
|