import gradio as gr
from gradio_client import Client as GrClient
import inspect
from gradio import routes
from typing import List, Type
from aiogoogletrans import Translator
import os
import re
import asyncio

loop = asyncio.get_event_loop()
gradio_client = GrClient(os.environ.get('GrClient_url'))
translator = Translator()


# Monkey patch: replace gradio.routes.get_types so the input/output types shown
# in the API docs are parsed out of each component class's docstring.
def get_types(cls_set: List[Type], component: str):
    docset = []
    types = []
    if component == "input":
        for cls in cls_set:
            doc = inspect.getdoc(cls)
            doc_lines = doc.split("\n")
            docset.append(doc_lines[1].split(":")[-1])
            types.append(doc_lines[1].split(")")[0].split("(")[-1])
    else:
        for cls in cls_set:
            doc = inspect.getdoc(cls)
            doc_lines = doc.split("\n")
            docset.append(doc_lines[-1].split(":")[-1])
            types.append(doc_lines[-1].split(")")[0].split("(")[-1])
    return docset, types


routes.get_types = get_types


# App code
def mbti(x):
    # Translate the Korean input to English, strip punctuation and special
    # characters, then forward the cleaned text to the remote Space (fn_index=2).
    t = loop.run_until_complete(translator.translate(x, src='ko', dest='en'))
    str_trans = re.sub(r'[-=+,#/\?:^.@*\"※~ㆍ!』‘|\(\)\[\]`\'…》\”\“\’·]', '', t.text)
    result = gradio_client.predict(
        str_trans,  # str, representing input in 'User input' Textbox component
        fn_index=2
    )
    return result


def chat(x):
    # Forward the raw input plus fixed sampling parameters to the remote chat
    # endpoint (fn_index=0).
    result = gradio_client.predict(
        x,    # str, representing input in 'User input' Textbox component
        0.9,  # float, representing input in 'Top-p (nucleus sampling)' Slider component
        50,   # int, representing input in 'Top-k (nucleus sampling)' Slider component
        0.9,  # float, representing input in 'Temperature' Slider component
        25,   # int, representing input in 'Max New Tokens' Slider component
        1.1,  # float, representing input in 'repetition_penalty' Slider component
        fn_index=0
    )
    return result


def yn(x):
    # Forward the raw input to the remote yes/no endpoint (fn_index=1).
    result = gradio_client.predict(
        x,  # str, representing input in 'User input' Textbox component
        fn_index=1
    )
    return result


with gr.Blocks() as demo:
    aa = gr.Interface(
        fn=yn,
        inputs="text",
        outputs="text",
        examples=[
            ["yes,no"]
        ],
    )
    bb = gr.Interface(
        fn=chat,
        inputs="text",
        outputs="text",
        examples=[
            ["chat"]
        ],
    )
    cc = gr.Interface(
        fn=mbti,
        inputs="text",
        outputs="text",
        examples=[
            ["mbti"]
        ],
    )

# enable_queue is redundant (and deprecated) once .queue() is chained, so it is dropped here.
demo.queue(max_size=32).launch()