import gradio as gr
from gradio_client import Client as GrClient
import inspect
from gradio import routes
from typing import List, Type
from aiogoogletrans import Translator 

import requests, os, re, asyncio


loop = asyncio.get_event_loop()
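# Client for the remote Gradio Space (its URL is read from the GrClient_url
# environment variable) and a Korean->English translator used by mbti().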
gradio_client = GrClient(os.environ.get('GrClient_url'))
translator = Translator()
# Monkey patch: route introspection can fail when Gradio builds its API docs,
# so override routes.get_types to read input/output types from component docstrings.
def get_types(cls_set: List[Type], component: str):
    docset = []
    types = []
    if component == "input":
        for cls in cls_set:
            doc = inspect.getdoc(cls)
            doc_lines = doc.split("\n")
            docset.append(doc_lines[1].split(":")[-1])
            types.append(doc_lines[1].split(")")[0].split("(")[-1])
    else:
        for cls in cls_set:
            doc = inspect.getdoc(cls)
            doc_lines = doc.split("\n")
            docset.append(doc_lines[-1].split(":")[-1])
            types.append(doc_lines[-1].split(")")[0].split("(")[-1])
    return docset, types
routes.get_types = get_types


# App code
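# mbti(): translate the Korean input to English, strip punctuation, and query the
# remote Space at fn_index=2; returns the parsed predictions sorted by score.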
def mbti(x):
    # Translate the Korean input to English, then strip special characters.
    t = loop.run_until_complete(translator.translate(x, src='ko', dest='en'))
    str_trans = re.sub(r'[-=+,#/\?:^.@*\"※~ㆍ!』‘|\(\)\[\]`\'…》\”\“\’·]', '', t.text)
    result = gradio_client.predict(
        str_trans,  # str representing input in the 'User input' Textbox component
        fn_index=2
    )
    # The endpoint returns a stringified list of score dicts; eval assumes trusted output.
    r = sorted(eval(result), key=lambda x: x['score'], reverse=True)

    return r

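# chat(): wrap the user text in a Korean persona prompt, query the chat endpoint
# (fn_index=0) with fixed sampling parameters, and post-process the generation.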
def chat(x):
    # Prepend the Korean system prompt: "You are an Assistant. You lead the conversation
    # by asking the other person a variety of questions. Please answer the Human
    # positively, empathetically, and at as much length as possible."
    x = f"[***너는 Assistant입니다. 상대에게 다양한 질문을 하며 대화를 이끌고 있습니다. Human에게 긍정적이고, 공감하며, 최대한 길게 대답해주세요***] {x}"
    x = x.replace('friend', 'Human').replace('you', 'Assistant')
    print("\n" + f"{x}")
    result = gradio_client.predict(
        x,      # str representing input in the 'User input' Textbox component
        0.9,    # float, 'Top-p (nucleus sampling)' Slider component
        40,     # int, 'Top-k (nucleus sampling)' Slider component
        0.65,   # float, 'Temperature' Slider component
        30,     # int, 'Max New Tokens' Slider component
        1.2,    # float, 'repetition_penalty' Slider component
        fn_index=0
    )
    result = str(result)
    # Drop the echoed prompt, normalize laughter, cut at stop markers, then strip
    # special characters and any remaining Latin letters.
    output = result[len(x)-4:]
    output = re.sub('하하', 'ㅎㅎ', output)
    output = output.split('띓')[0]
    output = output.split('<|endoftext|>')[0]
    output = re.sub(r'[=+#/\:@*\"※ㆍ』‘|\\\<\>\(\)\[\]`\'…》\”\“\’·]', ' ', output)
    output = re.sub('[a-zA-Z]', ' ', output)

    return output

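# yn(): forward the text unchanged to the fn_index=1 endpoint and return its output.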
def yn(x):
    result = gradio_client.predict(
        x,  # str representing input in the 'User input' Textbox component
        fn_index=1
    )
    return result

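# UI: three stacked Interfaces (chat, mbti, yn) inside a single Blocks app,
# served with a request queue of up to 32 pending jobs.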
with gr.Blocks() as demo:
    count = 0
    aa = gr.Interface(
      fn=chat,
      inputs="text",
      outputs="text",
      description="chat",
      #examples= [[f"\nfriend: 너는 꿈이 뭐야? \n\n### \nyou: "],[f"\nyou: 너는 무슨 색을 가장 좋아해? \nfriend: 글쎄 너는? \n\n### \nyou: "]]
      # Example prompts: "What is your dream?" and "What color do you like most?" / "Well, what about you?"
      examples= [[f"\nHuman: 너는 꿈이 뭐야? \n\n### \nAssistant: "],[f"\nAssistant: 너는 무슨 색을 가장 좋아해? \nHuman: 글쎄 너는? \n\n### \nAssistant: "]]
    )
    
    bb = gr.Interface(
      fn=mbti,
      inputs="text",
      outputs="text",
      description="mbti"
    )
    
    cc = gr.Interface(
      fn=yn,
      inputs="text",
      outputs="text",
      description="yn"
    )  
    demo.queue(max_size=32).launch(enable_queue=True)