# t5-paraphrasing / app.py
# Author: Ceshine Lee
# Last change: "Add a temperature option" (commit 3084017)
import os
import json
import requests
import gradio as gr
from gradio import inputs, outputs
# Hosted HuggingFace Inference API endpoint for the paraphrasing model.
ENDPOINT = (
    "https://api-inference.huggingface.co"
    "/models/ceshine/t5-paraphrase-quora-paws"
)
def paraphrase(source_text: str, temperature: float) -> str:
    """Paraphrase *source_text* via the hosted HF Inference API.

    Args:
        source_text: Sentence to paraphrase.
        temperature: Sampling temperature. A value > 0 enables top-k
            sampling at that temperature; 0 (or below) uses beam search.

    Returns:
        Up to three numbered paraphrases as a single string, or an
        error message when the API responds with a non-200 status.
    """
    if temperature > 0:
        # Stochastic decoding: top-k sampling at the requested temperature.
        params = {
            "do_sample": True,
            "temperature": temperature,
            "top_k": 5,
            "num_return_sequences": 10,
            "max_length": 100,
        }
    else:
        # Deterministic decoding: beam search.
        params = {
            "num_beams": 10,
            "num_return_sequences": 10,
            "max_length": 100,
        }
    res = requests.post(
        ENDPOINT,
        headers={"Authorization": f"Bearer {os.environ['TOKEN']}"},
        # `json=` serializes the payload and sets the Content-Type header,
        # replacing the manual json.dumps(...) round-trip.
        json={
            "inputs": "paraphrase: " + source_text,
            "parameters": params,
        },
        timeout=60,  # don't hang the UI forever on a stuck request
    )
    if res.status_code != 200:
        return f"Got a {res.status_code} status code from HuggingFace."
    results = res.json()
    # Drop candidates that merely echo the input (case-insensitive) and
    # keep at most three. Named `candidates` to avoid shadowing the
    # module-level `gradio.outputs` import.
    normalized_source = source_text.lower().strip()
    candidates = [
        x["generated_text"]
        for x in results
        if x["generated_text"].lower() != normalized_source
    ][:3]
    return "".join(f"{i + 1}: {c}\n\n" for i, c in enumerate(candidates))
# Wire the paraphrase function into a simple Gradio demo UI.
source_box = inputs.Textbox(label="Source text")
temperature_box = inputs.Number(
    default=0.0,
    label="Temperature (0 -> disable sampling and use beam search)",
)

interface = gr.Interface(
    fn=paraphrase,
    inputs=[source_box, temperature_box],
    outputs=outputs.Textbox(label="Generated text:"),
    title="T5 Sentence Paraphraser",
    description="A paraphrasing model trained on PAWS and Quora datasets.",
    examples=[
        ["I bought a ticket from London to New York.", 0],
        ["Weh Seun spends 14 hours a week doing housework.", 1.2],
    ],
)
interface.launch(enable_queue=True)