import gradio as gr
from gradio_client import Client as GrClient
import inspect
from gradio import routes
from typing import List, Type
from aiogoogletrans import Translator
import requests, os, re, asyncio
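# A single event loop is reused for the async translation calls below; the
# backing model Space is reached through gradio_client, whose URL is read
# from the GrClient_url environment variable.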
loop = asyncio.get_event_loop()
gradio_client = GrClient(os.environ.get('GrClient_url'))
translator = Translator()
# Monkey patch
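# gradio.routes.get_types is replaced because some Gradio 3.x builds error out
# while generating the auto API docs for certain components; parsing the
# component docstrings directly, as below, is a commonly used workaround.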
def get_types(cls_set: List[Type], component: str):
    docset = []
    types = []
    if component == "input":
        for cls in cls_set:
            doc = inspect.getdoc(cls)
            doc_lines = doc.split("\n")
            docset.append(doc_lines[1].split(":")[-1])
            types.append(doc_lines[1].split(")")[0].split("(")[-1])
    else:
        for cls in cls_set:
            doc = inspect.getdoc(cls)
            doc_lines = doc.split("\n")
            docset.append(doc_lines[-1].split(":")[-1])
            types.append(doc_lines[-1].split(")")[0].split("(")[-1])
    return docset, types
routes.get_types = get_types
def cut(prom, out):
    # Drop the echoed prompt, keep only the first generated line, and replace
    # special symbols with spaces.
    output = out[len(prom)-2:]
    output = output.split('<|endoftext|>')[0]
    output = output.split('\n')[0]
    output = re.sub('[=+#/\:@*\"※ㆍ』‘|\\\<\>\(\)\[\]`\'…》\”\“\’·]', ' ', output)
    return output
# App code
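# Each handler proxies to the remote Space by fn_index:
#   chat() -> fn_index=0 (generation with the sampling parameters below),
#   yn()   -> fn_index=1,
#   mbti() -> fn_index=2 (scored labels, computed on an English translation).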
def mbti(x):
    # Translate the Korean input to English, strip special characters, and ask
    # the remote Space for MBTI scores.
    t = loop.run_until_complete(translator.translate(x, src='ko', dest='en'))
    str_trans = re.sub('[-=+,#/\?:^.@*\"※~ㆍ!』‘|\(\)\[\]`\'…》\”\“\’·]', '', t.text)
    result = gradio_client.predict(
        str_trans,  # str representing input in 'User input' Textbox component
        fn_index=2
    )
    # The endpoint returns the scores as a stringified list of dicts.
    r = sorted(eval(result), key=lambda item: item['score'], reverse=True)
    return r
def chat(x):
    result = gradio_client.predict(
        x,    # str representing input in 'User input' Textbox component
        0.9,  # float, representing input in 'Top-p (nucleus sampling)' Slider component
        30,   # int, representing input in 'Top-k (nucleus sampling)' Slider component
        0.8,  # float, representing input in 'Temperature' Slider component
        22,   # int, representing input in 'Max New Tokens' Slider component
        1.2,  # float, representing input in 'repetition_penalty' Slider component
        fn_index=0
    )
    result = cut(x, result)
    return result
def yn(x):
    result = gradio_client.predict(
        x,  # str representing input in 'User input' Textbox component
        fn_index=1
    )
    return result
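# Expose the three handlers as simple text-in/text-out Interfaces inside one
# Blocks app; only the chat interface ships example prompts.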
with gr.Blocks() as demo:
    aa = gr.Interface(
        fn=chat,
        inputs="text",
        outputs="text",
        description="chat",
        examples=[["\nfriend: 너는 꿈이 뭐야? \n\n### \nyou: "], ["\nyou: 너는 무슨 음식 가장 좋아해? \nfriend: 글쎄 너는? \n\n### \nyou: "]]
    )
    bb = gr.Interface(
        fn=mbti,
        inputs="text",
        outputs="text",
        description="mbti"
    )
    cc = gr.Interface(
        fn=yn,
        inputs="text",
        outputs="text",
        description="yn"
    )

demo.queue(max_size=32).launch(enable_queue=True)