from threading import Thread
import gradio as gr
import inspect
from gradio import routes
from typing import List, Type
from petals import AutoDistributedModelForCausalLM
import requests, os, re, asyncio, json

loop = asyncio.get_event_loop()
# init code
def get_types(cls_set: List[Type], component: str):
    # Parse each component class's docstring to pull out its documented
    # description (text after ":") and type (text inside parentheses).
    docset = []
    types = []
    if component == "input":
        for cls in cls_set:
            doc = inspect.getdoc(cls)
            doc_lines = doc.split("\n")
            docset.append(doc_lines[1].split(":")[-1])
            types.append(doc_lines[1].split(")")[0].split("(")[-1])
    else:
        for cls in cls_set:
            doc = inspect.getdoc(cls)
            doc_lines = doc.split("\n")
            docset.append(doc_lines[-1].split(":")[-1])
            types.append(doc_lines[-1].split(")")[0].split("(")[-1])
    return docset, types

# Override gradio's built-in routes.get_types with the version above.
routes.get_types = get_types
# App code
def chat(id, npc, prompt):
    # get_coin endpoint: look up how many coins this id has left
    response = requests.post("https://ldhldh-api-for-unity.hf.space/run/predict_6", json={
        "data": [
            id,
        ]}).json()
    coin = response["data"][0]
    if int(coin) == 0:
        return "no coin"

    # model inference (placeholder response; see the hedged Petals sketch below)
    output = "This is an AI response."

    # add_transaction endpoint: record the inference as a transaction for this id
    response = requests.post("https://ldhldh-api-for-unity.hf.space/run/predict_5", json={
        "data": [
            id,
            "inference",
            "### input:\n" + prompt + "\n\n### output:\n" + output
        ]}).json()
    d = response["data"][0]

    return output
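
# A minimal sketch of how the placeholder response above could be replaced with real
# Petals inference. This helper is not wired into chat(); the model name, prompt
# template, and generation settings below are assumptions, since the original code
# imports AutoDistributedModelForCausalLM but never loads a model.
def petals_generate(prompt):
    from transformers import AutoTokenizer  # transformers is a petals dependency

    model_name = "petals-team/StableBeluga2"  # assumed model; any Petals-served model works
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoDistributedModelForCausalLM.from_pretrained(model_name)

    # Reuse the "### input: / ### output:" format that chat() logs as a transaction.
    input_ids = tokenizer(
        "### input:\n" + prompt + "\n\n### output:\n", return_tensors="pt"
    )["input_ids"]
    output_ids = model.generate(input_ids, max_new_tokens=64)

    # Decode only the newly generated tokens, not the echoed prompt.
    return tokenizer.decode(output_ids[0, input_ids.shape[1]:], skip_special_tokens=True)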
with gr.Blocks() as demo:
    count = 0
    aa = gr.Interface(
        fn=chat,
        inputs=["text", "text", "text"],
        outputs="text",
        description="chat: returns the AI response and creates a transaction internally. \n /run/predict",
    )

demo.queue(max_size=32).launch()
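
# Example client-side call to this Space's own /run/predict endpoint (a sketch; the
# Space URL is hypothetical and the argument order follows chat(id, npc, prompt)):
#
#   response = requests.post(
#       "https://<this-space>.hf.space/run/predict",
#       json={"data": ["player_id", "npc_name", "hello"]},
#   ).json()
#   print(response["data"][0])  # the AI response, or "no coin"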