from huggingface_hub import InferenceClient
import gradio as gr

client = InferenceClient(
    "mistralai/Mixtral-8x7B-Instruct-v0.1"
)
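
# Note (deployment assumption): InferenceClient calls the hosted Hugging Face
# Inference API; an access token (e.g. an HF_TOKEN secret or environment variable)
# may be required for this model depending on where the app runs.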

communities = '''
<h3 align="left">Communities:</h3>

<ul>
  <li>
    <p><strong><em><a href="https://developers.google.com/community/experts/directory?text=rishiraj">Google Developer Expert</a> in Machine Learning (Generative AI)</em></strong><br />
A Google Developer Expert (GDE) is a person recognized by Google as having exemplary expertise in web technologies or Google Developers products.</p>
  </li>
  <li>
    <p><strong><em><a href="https://twitter.com/TFUGKol">TensorFlow User Group Kolkata</a> (Organizer)</em></strong><br />
TensorFlow User Groups (TFUGs) are communities of developers, engineers, data scientists, and ML practitioners who are passionate about TensorFlow and related technologies.</p>
  </li>
  <li>
    <p><strong><em><a href="https://gdg.community.dev/gdg-cloud-kolkata/">Google Developer Groups Cloud Kolkata</a> (Volunteer)</em></strong><br />
Google Developer Groups (GDGs) Cloud are communities of developers, engineers, and cloud architects who are passionate about Google Cloud Platform and related technologies.</p>
  </li>
</ul>'''

# Function to handle dynamic content display
def show_info(section):
    if section == "Experiences":
        return "Details about Rishiraj's experiences..."
    elif section == "Communities":
        return communities
    elif section == "Recommendations":
        return "Rishiraj's recommendations..."
    elif section == "Conferences":
        return "Conferences attended by Rishiraj..."
    else:
        return "Select a section to display information."

# "About" tab: profile page built with Gradio Blocks
with gr.Blocks() as app:
    with gr.Row():
        with gr.Column():
            gr.Markdown("# Hi 👋, I'm Rishiraj Acharya (ঋষিরাজ আচার্য্য)")
            gr.Markdown("## Google Developer Expert in ML ✨ | Hugging Face Fellow 🤗 | GSoC '22 at TensorFlow 👨🏻‍🔬 | TFUG Kolkata Organizer 🎙️ | Kaggle Master 🧠 | Dynopii ML Engineer 👨🏻‍💻")
            gr.Markdown("**I work with natural language understanding, machine translation, named entity recognition, question answering, topic segmentation, and automatic speech recognition. My work typically relies on very large quantities of data and innovative methods in deep learning to tackle user challenges around the world — in languages from around the world. My areas of work include Natural Language Engineering, Language Modeling, Text-to-Speech Software Engineering, Speech Frameworks Engineering, Data Science, and Research.**")
            gr.Markdown("⚡ Fun fact **I’m a national level Chess player, a swimming champion and I can lecture for hours on the outer reaches of space and the craziness of astrophysics.**")
            gr.HTML(value='<br><p align="center"><a href="https://twitter.com/rishirajacharya" target="blank"><img align="center" src="https://raw.githubusercontent.com/rahuldkjain/github-profile-readme-generator/master/src/images/icons/Social/twitter.svg" alt="rishirajacharya" height="30" width="40" /></a></p>')
        with gr.Column():
            gr.Image("profile.png")
    
    with gr.Row():
        section_dropdown = gr.Dropdown(["Experiences", "Communities", "Recommendations", "Conferences"], label="Select Information to Display")
        info_display = gr.HTML(label="Information")
    
    section_dropdown.change(show_info, inputs=section_dropdown, outputs=info_display)
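    # The .change event above swaps the HTML panel whenever a new section is selected.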

def format_prompt(message, history):
    prompt = "<s>"
    for user_prompt, bot_response in history:
        prompt += f"[INST] {user_prompt} [/INST]"
        prompt += f" {bot_response}</s> "
    prompt += f"[INST] {message} [/INST]"
    return prompt
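
# For illustration, the Mixtral instruct format produced by format_prompt:
#   format_prompt("How are you?", [("Hi", "Hello!")])
#   -> "<s>[INST] Hi [/INST] Hello!</s> [INST] How are you? [/INST]"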

def generate(
    prompt, history, system_prompt, temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,
):
    temperature = float(temperature)
    if temperature < 1e-2:
        temperature = 1e-2
    top_p = float(top_p)

    generate_kwargs = dict(
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
        seed=42,
    )

    formatted_prompt = format_prompt(f"{system_prompt}, {prompt}" if system_prompt else prompt, history)
    stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
    output = ""

    for response in stream:
        output += response.token.text
        yield output
    return output
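
# Sketch (not executed): consuming the generator outside Gradio. Each yield returns
# the text accumulated so far, so the last yielded value is the full completion.
#   for partial in generate("Hello!", history=[], system_prompt="You are concise."):
#       reply = partial
#   print(reply)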


additional_inputs = [
    gr.Textbox(
        label="System Prompt",
        max_lines=1,
        interactive=True,
    ),
    gr.Slider(
        label="Temperature",
        value=0.9,
        minimum=0.0,
        maximum=1.0,
        step=0.05,
        interactive=True,
        info="Higher values produce more diverse outputs",
    ),
    gr.Slider(
        label="Max new tokens",
        value=256,
        minimum=0,
        maximum=1024,
        step=64,
        interactive=True,
        info="The maximum number of new tokens to generate",
    ),
    gr.Slider(
        label="Top-p (nucleus sampling)",
        value=0.90,
        minimum=0.0,
        maximum=1,
        step=0.05,
        interactive=True,
        info="Higher values sample more low-probability tokens",
    ),
    gr.Slider(
        label="Repetition penalty",
        value=1.2,
        minimum=1.0,
        maximum=2.0,
        step=0.05,
        interactive=True,
        info="Penalize repeated tokens",
    )
]

examples = [
    ["Can you explain how the QuickSort algorithm works and provide a Python implementation?", None, None, None, None, None],
    ["What are some unique features of Rust that make it stand out compared to other systems programming languages like C++?", None, None, None, None, None],
]
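# With additional_inputs set, gr.ChatInterface reads each example row above as
# [message, *values for the additional inputs]; the Nones keep the defaults.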

llm = gr.ChatInterface(
    fn=generate,
    chatbot=gr.Chatbot(show_label=True, show_share_button=True, show_copy_button=True, likeable=True, layout="bubble"),
    additional_inputs=additional_inputs,
    title="Hi 👋, I'm Rishiraj Acharya (ঋষিরাজ আচার্য্য)",
    examples=examples,
    concurrency_limit=20,
)

demo = gr.TabbedInterface([app, llm], ["About", "Chat"])
demo.launch()