import asyncio
from deep_translator import GoogleTranslator
import gradio as gr
from huggingface_hub import InferenceClient
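# Note: this script assumes the gradio, huggingface_hub, and deep_translator
# packages are installed (e.g. pip install gradio huggingface_hub deep_translator).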

# Translation helper using deep_translator's GoogleTranslator (run in a worker thread so it stays async)
async def translate_text(text, src_lang='tr', dest_lang='en'):
    translated = await asyncio.to_thread(GoogleTranslator(source=src_lang, target=dest_lang).translate, text)
    return translated
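
# Example usage (hypothetical input/output, for illustration only):
#   asyncio.run(translate_text("Merhaba dünya", src_lang="tr", dest_lang="en"))
#   # -> roughly "Hello world"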

# Client for querying the Hugging Face model through the Inference API
client = InferenceClient("suayptalha/arrLlama")

# Function that generates and returns the model's response
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    messages = [{"role": "system", "content": system_message}]
    
    # Append the conversation history to the message list
    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})

    # Translate the user's message from Turkish to English
    translated_message_en = asyncio.run(translate_text(message, src_lang='tr', dest_lang='en'))
    messages.append({"role": "user", "content": translated_message_en})

    # Stream the response from the model
    response = ""
    for msg in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = msg.choices[0].delta.content
        if token:  # the last streamed chunk may carry no content
            response += token

    # Translate the model's answer back to Turkish (this is what the user sees)
    translated_response_tr = asyncio.run(translate_text(response, src_lang='en', dest_lang='tr'))

    # Keep only the English version of the exchange in the history
    history.append((translated_message_en, response))

    yield translated_response_tr  # show the Turkish answer to the user

# Gradio chat interface
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="You are an AI assistant whose sole purpose is to inform the user about arrhythmia, monitor their heart rhythm, heart rate, arrhythmia risk, body temperature, and sweating, and provide advice based on these factors. If the user is at risk of arrhythmia, experiencing an arrhythmia episode, or going through an attack, you will give advice and explain what to do. You must not deviate from these topics.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
    ],
)

# Launch the interface
if __name__ == "__main__":
    demo.launch()