llmahmad committed on
Commit 175ea08 · verified · 1 Parent(s): e2ccc4b

Update app.py

Files changed (1)
  1. app.py +16 -11
app.py CHANGED
@@ -1,26 +1,30 @@
-# Load the GPT-2 model and tokenizer
 import os
 os.system('pip install streamlit transformers torch')
+
 import streamlit as st
 from transformers import GPT2LMHeadModel, GPT2Tokenizer
 import torch
 
-
+# Load the GPT-2 model and tokenizer
 model_name = 'gpt2-large'
 tokenizer = GPT2Tokenizer.from_pretrained(model_name)
 model = GPT2LMHeadModel.from_pretrained(model_name)
 
 def generate_blog_post(topic):
-    # Encode the input topic
-    inputs = tokenizer.encode(topic, return_tensors='pt')
+    try:
+        # Encode the input topic
+        inputs = tokenizer.encode(topic, return_tensors='pt')
 
-    # Generate the blog post
-    outputs = model.generate(inputs, max_length=500, num_return_sequences=1, no_repeat_ngram_size=2,
-                             do_sample=True, top_k=50, top_p=0.95, temperature=0.9)
+        # Generate the blog post
+        outputs = model.generate(inputs, max_length=500, num_return_sequences=1, no_repeat_ngram_size=2,
+                                 do_sample=True, top_k=50, top_p=0.95, temperature=0.9)
 
-    # Decode the generated text
-    blog_post = tokenizer.decode(outputs[0], skip_special_tokens=True)
-    return blog_post
+        # Decode the generated text
+        blog_post = tokenizer.decode(outputs[0], skip_special_tokens=True)
+        return blog_post
+    except Exception as e:
+        st.error(f"Error: {e}")
+        return ""
 
 # Streamlit app
 st.title("Blog Post Generator")
@@ -29,5 +33,6 @@ st.write("Enter a topic to generate a blog post.")
 topic = st.text_input("Topic:")
 
 if st.button("Generate"):
-    blog_post = generate_blog_post(topic)
+    with st.spinner('Generating...'):
+        blog_post = generate_blog_post(topic)
     st.write(blog_post)
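For a quick sanity check of the generation parameters outside Streamlit, a minimal standalone sketch is below. It is not part of this commit: it swaps in the smaller 'gpt2' checkpoint to keep the test light, uses a made-up example topic, and adds pad_token_id=tokenizer.eos_token_id only to quiet the usual GPT-2 padding warning; otherwise it mirrors the generate() call that generate_blog_post now wraps.

# Standalone sketch, not part of the commit: mirrors the generate() call in app.py.
# Assumptions: 'gpt2' instead of 'gpt2-large', a sample topic, explicit pad_token_id.
from transformers import GPT2LMHeadModel, GPT2Tokenizer

tokenizer = GPT2Tokenizer.from_pretrained('gpt2')
model = GPT2LMHeadModel.from_pretrained('gpt2')

inputs = tokenizer.encode("The future of renewable energy", return_tensors='pt')
outputs = model.generate(
    inputs,
    max_length=500,
    num_return_sequences=1,
    no_repeat_ngram_size=2,
    do_sample=True,
    top_k=50,
    top_p=0.95,
    temperature=0.9,
    pad_token_id=tokenizer.eos_token_id,  # GPT-2 has no pad token by default
)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))

The app itself is launched the usual Streamlit way, with streamlit run app.py.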