Runtime error
Update app.py
app.py CHANGED
@@ -1,3 +1,26 @@
+# -*- coding: utf-8 -*-
+"""Text-Generation-Gradio-App.ipynb
+
+Automatically generated by Colaboratory.
+
+Original file is located at
+    https://colab.research.google.com/drive/1OfP8zY_Nwx2U2QeYnYRanlH7_SuKzmGq
+"""
+%pip install -q gradio
+%pip install -q git+https://github.com/huggingface/transformers.git
 import gradio as gr
+import tensorflow as tf
+
+from transformers import TFGPT2LMHeadModel, GPT2Tokenizer
+
+tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
+model = TFGPT2LMHeadModel.from_pretrained("gpt2",pad_token_id=tokenizer.eos_token_id)
+
+def generate_text(inp):
+    input_ids = tokenizer.encode(inp, return_tensors='tf')
+    beam_output = model.generate(input_ids, max_length=100, num_beams=5, no_repeat_ngram_size=2, early_stopping= True)
+    output = tokenizer.decode(beam_output[0], skip_special_token=True, clean_up_tokenization_spaces=True)
+    return ".".join(output.split(".")[:-1]) + "."
 
-gr.
+output_text = gr.outputs.Textbox()
+gr.Interface(generate_text,"textbox",output_text,title="Text Generation machine ",description="Ask any question. Note: It can take 20-60 seconds to generate output based on your internet connection.").launch()
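Note on the "Runtime error" status: the committed file contains two likely problems. The %pip install lines are IPython/Colab magics, which raise a SyntaxError when app.py is executed as a plain Python script on Spaces, and skip_special_token looks like a typo for skip_special_tokens (as written, the argument is most likely ignored, so special tokens would not be stripped from the output). The sketch below is an illustrative rewrite, not the author's follow-up commit: it assumes the dependencies (gradio, tensorflow, transformers) are moved into the Space's requirements.txt, drops the unused tensorflow import, and uses Gradio's string component shortcuts instead of the older gr.outputs API; the variable name demo is arbitrary.

    # app.py (hypothetical corrected sketch; deps assumed in requirements.txt:
    # gradio, tensorflow, transformers)
    import gradio as gr
    from transformers import TFGPT2LMHeadModel, GPT2Tokenizer

    # Load GPT-2 and reuse the EOS token as padding, as in the original commit.
    tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
    model = TFGPT2LMHeadModel.from_pretrained("gpt2", pad_token_id=tokenizer.eos_token_id)

    def generate_text(inp):
        # Encode the prompt, run beam search, and decode the best beam.
        input_ids = tokenizer.encode(inp, return_tensors="tf")
        beam_output = model.generate(
            input_ids,
            max_length=100,
            num_beams=5,
            no_repeat_ngram_size=2,
            early_stopping=True,
        )
        output = tokenizer.decode(
            beam_output[0],
            skip_special_tokens=True,  # corrected keyword (diff has "skip_special_token")
            clean_up_tokenization_spaces=True,
        )
        # Drop the trailing partial sentence so the reply ends on a full stop.
        return ".".join(output.split(".")[:-1]) + "."

    demo = gr.Interface(
        fn=generate_text,
        inputs="textbox",
        outputs="textbox",
        title="Text Generation machine",
        description="Ask any question. Note: It can take 20-60 seconds to generate output based on your internet connection.",
    )
    demo.launch()

On Spaces, packages listed in requirements.txt are installed when the Space builds, so no install commands are needed inside app.py itself.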