import gradio as gr
from gradio_client import Client as GrClient
import inspect
from gradio import routes
from typing import List, Type
from aiogoogletrans import Translator

import requests, os, re, ast, asyncio

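# Runtime setup: the heavy lifting is done by a separate Gradio Space reached
# through gradio_client; the GrClient_url environment variable is expected to
# hold that Space's URL. aiogoogletrans is async, so a module-level event loop
# is kept around to drive its coroutines from the synchronous handlers below.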
loop = asyncio.get_event_loop()

gradio_client = GrClient(os.environ.get('GrClient_url'))
translator = Translator()

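# Workaround: in some Gradio versions routes.get_types fails while parsing
# component docstrings for the auto-generated API docs, so a more tolerant
# replacement that pulls the parameter/return types out of each class
# docstring is patched in below.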
def get_types(cls_set: List[Type], component: str):
    docset = []
    types = []
    if component == "input":
        for cls in cls_set:
            doc = inspect.getdoc(cls)
            doc_lines = doc.split("\n")
            docset.append(doc_lines[1].split(":")[-1])
            types.append(doc_lines[1].split(")")[0].split("(")[-1])
    else:
        for cls in cls_set:
            doc = inspect.getdoc(cls)
            doc_lines = doc.split("\n")
            docset.append(doc_lines[-1].split(":")[-1])
            types.append(doc_lines[-1].split(")")[0].split("(")[-1])
    return docset, types


routes.get_types = get_types

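# mbti(x): Korean free text -> MBTI-style label scores.
# The input is translated to English, stripped of punctuation and sent to the
# backend Space at fn_index=2, which is assumed to return a stringified list
# of {'label': ..., 'score': ...} dicts; it is returned sorted by score.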
def mbti(x):
    t = loop.run_until_complete(translator.translate(x, src='ko', dest='en'))
    # Strip punctuation before sending the translated text to the classifier.
    str_trans = re.sub('[-=+,#/\?:^.@*\"※~ㆍ!』‘|\(\)\[\]`\'…》\”\“\’·]', '', t.text)
    result = gradio_client.predict(
        str_trans,
        fn_index=2
    )
    # Parse the returned literal (safer than eval) and sort by descending score.
    r = sorted(ast.literal_eval(result), key=lambda x: x['score'], reverse=True)

    return r

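# chat(x): single-turn chat completion against the backend Space (fn_index=0).
# A Korean instruction prefix is prepended, the numeric arguments are passed
# through to the Space (presumably decoding parameters such as temperature,
# top-k and top-p; their exact meaning is defined by that Space's API), and
# the generated continuation is cleaned up before it is returned.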
def chat(x):
    # Korean instruction prefix, roughly: "I am the Assistant. I ask the other
    # person various questions and lead the conversation. Please answer the
    # Human positively, empathetically and at as much length as possible."
    x = f"[***나는 Assistant입니다. 상대에게 다양한 질문을 하며 대화를 이끌고 있습니다. Human에게 긍정적이고, 공감하며, 최대한 길게 대답해주세요***] {x}"
    x = x.replace('friend', 'Human').replace('you', 'Assistant')
    print("\n" + f"{x}")
    result = gradio_client.predict(
        x,
        0.9,
        40,
        0.65,
        30,
        1.2,
        fn_index=0
    )
    result = str(result)
    # Drop the echoed prompt; the -4 offset is an empirical adjustment.
    output = result[len(x)-4:]
    output = re.sub('하하', 'ㅋㅋ', output)
    # Truncate at the stop character and at the end-of-text token.
    output = output.split('λ')[0]
    output = output.split('<|endoftext|>')[0]
    # Replace leftover punctuation and any Latin characters with spaces.
    output = re.sub('[=+#/\:@*\"※ㆍ』‘|\\\<\>\(\)\[\]`\'…》\”\“\’·]', ' ', output)
    output = re.sub('[a-zA-Z]', ' ', output)

    return output

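# yn(x): pass the text straight through to the backend Space at fn_index=1
# (presumably a yes/no style classifier; the actual task is defined by that
# Space, not here).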
def yn(x):
    result = gradio_client.predict(
        x,
        fn_index=1
    )
    return result

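# UI: three gr.Interface panels (chat, mbti, yn) rendered inside a single
# Blocks app and served behind a request queue.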
with gr.Blocks() as demo:
    count = 0

    aa = gr.Interface(
        fn=chat,
        inputs="text",
        outputs="text",
        description="chat",
        examples=[
            [f"\nHuman: 너는 꿈이 뭐야? \n\n### \nAssistant: "],
            [f"\nAssistant: 너는 무슨 색을 가장 좋아해? \nHuman: 글쎄 너는? \n\n### \nAssistant: "],
        ],
    )

    bb = gr.Interface(
        fn=mbti,
        inputs="text",
        outputs="text",
        description="mbti",
    )

    cc = gr.Interface(
        fn=yn,
        inputs="text",
        outputs="text",
        description="yn",
    )

# queue() already enables queuing, so launch() needs no extra flag.
demo.queue(max_size=32).launch()
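# Sketch (not part of the app): once this script is running, the endpoints can
# also be called programmatically with gradio_client. The URL is hypothetical,
# and the right api_name/fn_index for each panel should be checked with
# view_api(), since Blocks numbers every event it registers.
#
#   from gradio_client import Client
#   client = Client("http://127.0.0.1:7860/")   # hypothetical local URL
#   client.view_api()                           # list callable endpoints
#   client.predict("\nHuman: 안녕? \n\n### \nAssistant: ", fn_index=0)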