import gradio as gr
import requests

import numpy as np
import pandas as pd


# Template
title = "Murder on Horsea Island Prototype with Sentence Similarity (Paraphrase XLM-R multilingual)🔪 (WORK IN PROGRESS)"
description = "Prototype of the Unity Game (to test the questions)."
article = """
"""
theme="huggingface"

# examples =

# API
SS_API_URL = "https://api-inference.huggingface.co/models/sentence-transformers/paraphrase-xlm-r-multilingual-v1"
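# Illustrative sketch of a round-trip with this sentence-similarity endpoint
# (not executed here): the pipeline takes one source sentence plus a list of
# candidate sentences and returns one similarity score per candidate.
#
#   payload = {
#       "inputs": {
#           "source_sentence": "Where were you last night?",
#           "sentences": ["Where were you yesterday evening?", "Do you like tea?"],
#       },
#   }
#   requests.post(SS_API_URL, json=payload).json()
#   # -> e.g. [0.91, 0.12]  (example values; actual scores depend on the model)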



# Build each character's question/answer lists before starting
def build_initial_questions_and_answers():
    # Eleanor
    eleanor_df = pd.read_csv("eleanor.csv", delimiter=",")
    eleanor_questions = eleanor_df["Questions"].tolist()
    eleanor_answers = eleanor_df["Answers"].tolist()

    # Tom
    tom_df = pd.read_csv("tom.csv", delimiter=",")
    tom_questions = tom_df["Questions"].tolist()
    tom_answers = tom_df["Answers"].tolist()

    # Charles
    charles_df = pd.read_csv("charles.csv", delimiter=",")
    charles_questions = charles_df["Questions"].tolist()
    charles_answers = charles_df["Answers"].tolist()

    return eleanor_questions, eleanor_answers, tom_questions, tom_answers, charles_questions, charles_answers
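

# The loader above assumes each character CSV has two columns named
# "Questions" and "Answers", one pre-written pair per row, for example:
#
#   Questions,Answers
#   Where were you last night?,I was in the library all evening.
#   Did you know the victim?,Only in passing.
#
# (Illustrative rows only; the real files ship with the Space.)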



def build_json(message, questions):
    # Payload for the sentence-similarity pipeline: the player's message is
    # compared against every stored question for the chosen character.
    payload = {
        "inputs": {
            "source_sentence": message,
            "sentences": questions,
        },
    }
    return payload


def query(payload):
    response = requests.post(SS_API_URL, json=payload)
    return response.json()
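
# Note: public models can be queried anonymously but are rate-limited; a token
# could be passed if needed, e.g. (sketch only, HF_TOKEN is a placeholder):
#   requests.post(SS_API_URL, json=payload,
#                 headers={"Authorization": f"Bearer {HF_TOKEN}"})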

def answer(output_json, character):
    # The API returns one similarity score per stored question; pick the answer
    # whose question is most similar to the player's message.
    idx = np.argmax(output_json)

    if character == "eleanor":
        answer_ = eleanor_answers[idx]
    elif character == "tom":
        answer_ = tom_answers[idx]
    else:
        answer_ = charles_answers[idx]

    return answer_


def chat(message, character):
    history = gr.get_state() or []

    # The Radio component returns the display label (e.g. "Eleanor"), so
    # normalise it before matching against the lowercase keys used below.
    character = character.lower() if character else "charles"

    if character == "eleanor":
        payload = build_json(message, eleanor_questions)
    elif character == "tom":
        payload = build_json(message, tom_questions)
    else:
        payload = build_json(message, charles_questions)

    output = query(payload)
    answer_ = answer(output, character)

    history.append((message, answer_))
    gr.set_state(history)

    # Render the whole conversation with the classes styled in the CSS below.
    html = "<div class='chatbox'>"
    for user_msg, resp_msg in history:
        html += f"<div class='user_msg'>{user_msg}</div>"
        html += f"<div class='resp_msg'>{resp_msg}</div>"
    html += "</div>"
    return html

eleanor_questions, eleanor_answers, tom_questions, tom_answers, charles_questions, charles_answers = build_initial_questions_and_answers()

choices = ["Eleanor", "Tom", "Charles (The Butler)"]
character = gr.inputs.Radio(choices, type="value", default=None, label=None)

iface = gr.Interface(chat, ["text", character], "html",
                     title=title, description=description, article=article, theme=theme,
                     css="""
    .chatbox {display:flex;flex-direction:column}
    .user_msg, .resp_msg {padding:4px;margin-bottom:4px;border-radius:4px;width:80%}
    .user_msg {background-color:cornflowerblue;color:white;align-self:start}
    .resp_msg {background-color:lightgray;align-self:self-end}
""", allow_screenshot=False, allow_flagging=False)

if __name__ == "__main__":
    iface.launch()