"""Gradio demo that paraphrases English sentences with the
ceshine/t5-paraphrase-quora-paws model via the Hugging Face Inference API."""

import os
import json

import requests
import gradio as gr

ENDPOINT = (
    "https://api-inference.huggingface.co/models/ceshine/t5-paraphrase-quora-paws"
)
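# For text-to-text generation models, the Inference API typically returns a
# JSON list of candidates shaped like [{"generated_text": "..."}, ...];
# paraphrase() below assumes that shape.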


def paraphrase(source_text):
    """Query the hosted model and return up to three paraphrases as numbered lines."""
    # Requires a Hugging Face API token in the TOKEN environment variable.
    res = requests.post(
        ENDPOINT,
        headers={"Authorization": f"Bearer {os.environ['TOKEN']}"},
        data=json.dumps(
            {
                "inputs": source_text,
                "parameters": {
                    # Beam search over 10 beams; top_k and temperature only
                    # take effect when sampling is enabled.
                    # "do_sample": True,
                    "num_beams": 10,
                    "top_k": 5,
                    "repetition_penalty": 2.0,
                    "temperature": 1.5,
                    "num_return_sequences": 10,
                    "max_length": 200,
                },
            }
        ),
    )
    if res.status_code != 200:
        raise ValueError(
            f"Could not complete request to the Hugging Face API, error {res.status_code}"
        )
    results = res.json()
    print(results)  # log the raw API response for debugging
    # Drop candidates that merely echo the input and keep at most three.
    outputs = [
        x["generated_text"]
        for x in results
        if x["generated_text"].lower() != source_text.lower().strip()
    ][:3]
    # Number the paraphrases for display in the output textbox.
    text = ""
    for i, output in enumerate(outputs):
        text += f"{i+1}:  {output}\n\n"
    return text
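
# Illustrative check (assumes TOKEN holds a valid Hugging Face API token; the
# exact paraphrases returned by the hosted model will vary between calls):
#
#     print(paraphrase("I bought a ticket from London to New York."))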


interface = gr.Interface(
    fn=paraphrase,
    inputs=gr.Textbox(label="Input"),
    outputs=gr.Textbox(label="Generated text"),
    title="T5 Sentence Paraphraser",
    description="A T5 model fine-tuned for paraphrasing on the Quora and PAWS datasets",
    examples=[
        ["I bought a ticket from London to New York."],
        ["Weh Seun spends 14 hours a week doing housework."],
    ],
)

interface.queue()
interface.launch()
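
# Running locally: set TOKEN to a Hugging Face API token before starting, e.g.
#   TOKEN=<your token> python app.py
# By default, Gradio serves the app at http://127.0.0.1:7860.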