import gradio as gr
from huggingface_hub import InferenceClient
"""
For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
"""
# client = InferenceClient("karanzrk/bert-Causal-QA")
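# A minimal sketch (not used in this app) of querying the hosted Inference API
# instead of running the model locally; the model id and the use of
# text_generation here are assumptions, and the model would need to be served
# by the Inference API for this to work:
#
#   client = InferenceClient("karanzrk/qa_t5")
#   answer = client.text_generation("Question: What causes rain?", max_new_tokens=128)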
from transformers import pipeline
# Local text2text pipeline used to answer questions.
generator = pipeline("text2text-generation", model="karanzrk/qa_t5", tokenizer="t5-small", max_length=128)
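# A minimal sketch of what the pipeline returns (output shape per the transformers
# text2text-generation pipeline; the question below is only an illustration):
#
#   result = generator("Question: What causes rain?")
#   # result is a list with one dict, e.g. [{"generated_text": "..."}]
#   print(result[0]["generated_text"])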
# def respond(
# message,
# max_tokens,
# ):
# messages = [{"role": "system", "content": system_message}]
# for val in history:
# if val[0]:
# messages.append({"role": "user", "content": val[0]})
# if val[1]:
# messages.append({"role": "assistant", "content": val[1]})
# messages.append({"role": "user", "content": message})
# response = ""
# for message in client.chat_completion(
# messages,
# max_tokens=max_tokens,
# stream=True,
# temperature=temperature,
# top_p=top_p,
# ):
# token = message.choices[0].delta.content
# response += token
# yield response
# """
# For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
# """
# demo = gr.ChatInterface(
# respond,
# additional_inputs=[
# gr.Textbox(value="Question: ", label="System message"),
# gr.Slider(minimum=1, maximum=128, value=512, step=1, label="Max new tokens"),
# gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
# gr.Slider(
# minimum=0.1,
# maximum=1.0,
# value=0.95,
# step=0.05,
# label="Top-p (nucleus sampling)",
# ),
# ],
# )
def inference(text):
    # classifier = pipeline("text-classification", model="karanzrk/essayl0")
    # Prepend the question prefix expected by the fine-tuned model.
    text = "Question: " + text
    output = generator(text)
    # The pipeline returns a list of dicts; extract the generated answer string.
    answer = output[0]["generated_text"]
    return answer
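# Quick local check (a sketch; the question is illustrative only):
#   print(inference("What causes earthquakes?"))
# should print the model's generated answer as a plain string.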
# launcher = gr.Interface(
# fn=inference,
# inputs=gr.Textbox(lines=5, placeholder="Essay here...."),
# outputs="text"
# )
with gr.Blocks() as demo:
    gr.Markdown(
        """
        # Welcome to t5-demo
        Ask your question
        """
    )
    inputs = gr.Textbox(label="Input Box", lines=5, placeholder="Question: ")
    button = gr.Button("Ask!")
    output = gr.Textbox(label="Output Box")
    button.click(fn=inference, inputs=inputs, outputs=output, api_name="Autograde")

if __name__ == "__main__":
    demo.launch()
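# A minimal client-side sketch for calling the "Autograde" endpoint exposed by
# button.click above (assumes the gradio_client package; the Space id below is a
# placeholder, not the real one):
#
#   from gradio_client import Client
#   client = Client("karanzrk/<space-id>")
#   print(client.predict("What causes rain?", api_name="/Autograde"))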