import transformers
import torch
import gradio as gr

# Load the chatbot model
model_id = "meta-llama/Meta-Llama-3-8B-Instruct"
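# Note: this is a gated checkpoint on the Hugging Face Hub; downloading it
# requires accepting the Llama 3 license and authenticating (e.g. via an
# HF_TOKEN secret when running on Spaces).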
pipeline = transformers.pipeline(
    "text-generation",
    model=model_id,
    model_kwargs={"torch_dtype": torch.bfloat16},  # load weights in bfloat16 to reduce memory use
    device_map="auto",  # let accelerate place the model on the available device(s)
)

# Compute each institution's total score from its component scores and assign ranks (1 = highest total)
def calculate_ranking(data):
    for institution in data:
        institution["Total"] = (
            institution["TLR"] + institution["GO"] + institution["OI"] + institution["PR"]
        )
    ranked_data = sorted(data, key=lambda x: x["Total"], reverse=True)
    for rank, institution in enumerate(ranked_data, start=1):
        institution["Rank"] = rank
    return ranked_data

# Chatbot function with ranking logic
def chatbot_response(user_message):
    if "rank" in user_message.lower():
        # Example data for ranking
        example_data = [
            {"Institution": "A", "TLR": 70, "GO": 85, "OI": 90, "PR": 75},
            {"Institution": "B", "TLR": 80, "GO": 88, "OI": 85, "PR": 90},
            {"Institution": "C", "TLR": 65, "GO": 80, "OI": 70, "PR": 60},
        ]
        ranked_data = calculate_ranking(example_data)
        response = "Here are the ranks of the institutions:\n"
        for institution in ranked_data:
            response += f"Rank {institution['Rank']}: {institution['Institution']} (Total Score: {institution['Total']})\n"
        return response
    else:
        # Generate a response from the model; return_full_text=False returns
        # only the completion instead of echoing the prompt back to the user.
        outputs = pipeline(
            user_message,
            max_new_tokens=256,
            do_sample=True,
            temperature=0.6,
            top_p=0.9,
            return_full_text=False,
        )
        return outputs[0]["generated_text"]

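# Note: with a sufficiently recent transformers release, the text-generation
# pipeline also accepts chat-style messages so the model's chat template is
# applied, which generally suits an instruct-tuned model better. A minimal
# sketch, assuming that API is available in the installed version:
#
#     outputs = pipeline(
#         [{"role": "user", "content": user_message}],
#         max_new_tokens=256,
#     )
#     reply = outputs[0]["generated_text"][-1]["content"]
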
# Gradio interface
def build_gradio_ui():
    with gr.Blocks() as demo:
        gr.Markdown("## Chatbot with Hugging Face Spaces")
        gr.Markdown("Type a message to interact with the chatbot! (Ask about institution rankings too!)")
        with gr.Row():
            user_input = gr.Textbox(label="Your Message", placeholder="Type your message here...")
            chatbot_output = gr.Textbox(label="Chatbot Response", interactive=False)
        submit_button = gr.Button("Send")
        submit_button.click(chatbot_response, inputs=[user_input], outputs=[chatbot_output])

    return demo

# Build the Gradio app; launch it when the script is run directly
demo = build_gradio_ui()

if __name__ == "__main__":
    demo.launch()