# Spaces: Sleeping
# (Hugging Face Space status header captured along with the file; not code.)
import asyncio
import inspect
import os
import re
from typing import List, Type

import gradio as gr
import requests
from gradio import routes
from gradio_client import Client as GrClient

# NOTE(review): `loop` is never referenced in the visible file; kept for
# backward compatibility. asyncio.get_event_loop() is deprecated outside a
# running loop on Python 3.10+ — use asyncio.new_event_loop() if it is
# actually needed.
loop = asyncio.get_event_loop()

# Remote Space hosting the actual language model; all text generation is
# delegated to it via gradio_client (network call at import time).
gradio_client = GrClient('https://ldhldh-demo.hf.space/')
# Monkey patch
def get_types(cls_set: List[Type], component: str):
    """Extract descriptions and type names from gradio component docstrings.

    Gradio's API-route builder uses this to document each component. For an
    "input" component the relevant docstring line is the second one; for any
    other component it is the last one.

    Parameters:
        cls_set: component classes whose docstrings are parsed.
        component: "input" selects docstring line 1, anything else selects
            the last line.

    Returns:
        (docset, types): parallel lists — the text after the final ":" on
        the chosen line, and the name found inside the "(...)" on that line.
    """
    # Which docstring line carries the "name (type): description" info.
    line_idx = 1 if component == "input" else -1
    docset: List[str] = []
    types: List[str] = []
    for cls in cls_set:
        doc = inspect.getdoc(cls)
        if doc is None:
            # The original crashed on classes without a docstring; skip them.
            continue
        line = doc.split("\n")[line_idx]
        docset.append(line.split(":")[-1])
        types.append(line.split(")")[0].split("(")[-1])
    return docset, types
# Replace gradio's route doc-type parser with the patched version defined above.
routes.get_types = get_types
# App code
def chat(x):
    """Send the user's message to the remote model Space and clean the reply.

    Parameters:
        x: raw user message (Korean; the literals below are preserved
           byte-for-byte from the original, mojibake included).

    Returns:
        The model's reply with the echoed prompt, end markers and most
        punctuation/symbol characters stripped out.
    """
    # Wrap the message in the fixed system-style prompt the remote model expects.
    x = f"[***λλ Assistantμ λλ€. μλμκ² λ€μν μ§λ¬Έμ νλ©° λνλ₯Ό μ΄λκ³ μμ΅λλ€. Humanμκ² κΈμ μ μ΄κ³ , 곡κ°νλ©°, μ΅λν κΈΈκ² λλ΅ν΄μ£ΌμΈμ***]\nHuman: {x}\n\n###\nAssistant:"
    print("\n___________________\n" + f"{x}")
    # Positional args mirror the sliders exposed by the remote Space (fn_index=0).
    result = gradio_client.predict(
        x,     # 'User input' Textbox
        0.91,  # 'Top-p (nucleus sampling)' Slider
        40,    # 'Top-k (nucleus sampling)' Slider
        0.65,  # 'Temperature' Slider
        20,    # 'Max New Tokens' Slider
        1.2,   # 'repetition_penalty' Slider
        fn_index=0,
    )
    result = str(result)
    # Drop the echoed prompt: everything up to the final ":" of "Assistant:"
    # plus one extra character.
    output = result[len(x.rsplit(':', 1)[0]) + 2:]
    output = re.sub('νν', 'γ γ ', output)
    # Truncate at the first end marker the model emits.
    output = output.split('λ')[0]
    output = output.split('endoftext')[0]
    # Strip punctuation/symbols. Raw string avoids the invalid escape-sequence
    # warnings ("\:", "\<", ...) the original non-raw pattern produced; the
    # matched character set is unchanged.
    output = re.sub(r'[=+#/\:@*\"β»γγβ|\\\<\>\(\)\[\]`\'β¦γ\β\β\βΒ·]', '', output)
    #output = re.sub('[a-zA-Z]',' ',output)
    return output
with gr.Blocks() as demo:
    # Single-turn chat UI; gr.Interface registers itself inside the Blocks
    # context, so the return value does not need to be kept.
    gr.Interface(
        fn=chat,
        inputs="text",
        outputs="text",
        description="chat",
        examples=[["λλ κΏμ΄ λμΌ?"], ["λλ λ¬΄μ¨ μμ κ°μ₯ μ’μν΄?"]],
    )

# queue() already enables request queuing; the deprecated `enable_queue`
# launch flag was redundant and is removed in newer gradio versions.
demo.queue(max_size=32).launch()