alan5543 committed on
Commit 13ce723 · 1 Parent(s): b34f46e
Files changed (1)
  1. app.py +33 -11
app.py CHANGED
@@ -9,7 +9,7 @@ device = "cuda" if torch.cuda.is_available() else "cpu"
 # Load the model and tokenizer with GPU optimizations
 model = AutoModelForCausalLM.from_pretrained(
     "AlanYky/phi-3.5_tweets_instruct",
-    torch_dtype=torch.float16,  # Use FP16 for GPU
+    torch_dtype=torch.float32,
     trust_remote_code=True,
 )

@@ -25,7 +25,7 @@ pipe = pipeline(

 # Define generation arguments
 generation_args = {
-    "max_new_tokens": 50,
+    "max_new_tokens": 70,
     "return_full_text": False,
     "temperature": 0.7,
     "top_k": 20,
@@ -74,20 +74,42 @@ def clean_tweet(tweet):

     return cleaned_tweet.strip()

-# Gradio interface
 with gr.Blocks() as demo:
-    gr.Markdown("# Tweet Generator")
-    with gr.Row():
-        instruction_input = gr.Textbox(
-            label="Instruction",
-            placeholder="Enter your tweet idea (It can be a topic, hashtag, sentence, or any format)..."
+    # Add a title with the X.com logo
+    with gr.Column(align="center"):
+        gr.Markdown(
+            """
+            <div style="text-align: center;">
+                <img src="https://upload.wikimedia.org/wikipedia/commons/b/b7/X_logo.jpg"
+                     alt="X.com Logo" width="100">
+                <h1 style="font-size: 2.5em; margin: 0;">Tweet Generator</h1>
+                <p style="font-size: 1.2em; color: gray;">
+                    Powered by <b>AlanYky/phi-3.5_tweets_instruct</b>
+                </p>
+            </div>
+            """,
+            elem_id="header"
         )

-        generate_button = gr.Button("Generate")
-        output_box = gr.Textbox(label="Generated Tweet", placeholder="Your tweet will appear here.")
+    # Center the input and output components
+    with gr.Column(align="center"):
+        instruction_input = gr.Textbox(
+            label="Tweet Idea",
+            placeholder="Enter your tweet idea (It can be a topic, hashtag, sentence, or any format)...",
+            lines=2,
+            elem_id="input-box"
+        )
+        generate_button = gr.Button("Generate", elem_id="generate-button")
+        output_box = gr.Textbox(
+            label="Generated Tweet",
+            placeholder="Your tweet will appear here.",
+            lines=3,
+            elem_id="output-box"
+        )

+    # Connect the button to the generate function
     generate_button.click(generate_tweet, inputs=instruction_input, outputs=output_box)

     print("Model loaded on:", next(model.parameters()).device)

-demo.launch()
+demo.launch()
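
For readers of the diff: the hunk headers mention code that the changed lines depend on but that is not included here, namely the device selection, the pipe = pipeline(...) call, and the generate_tweet/clean_tweet helpers wired to the Generate button. The sketch below shows one plausible way those pieces fit around the changed lines after this commit; the pipeline task and arguments, the tokenizer, the prompt template, and the bodies of clean_tweet() and generate_tweet() are assumptions for illustration, not part of the committed file.

# Context sketch of app.py around this commit. Everything marked "assumed"
# is illustrative; only the names and the changed values come from the diff.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

device = "cuda" if torch.cuda.is_available() else "cpu"  # shown in the first hunk header

model = AutoModelForCausalLM.from_pretrained(
    "AlanYky/phi-3.5_tweets_instruct",
    torch_dtype=torch.float32,  # value after this commit
    trust_remote_code=True,
)
tokenizer = AutoTokenizer.from_pretrained("AlanYky/phi-3.5_tweets_instruct")  # assumed

pipe = pipeline(  # "pipe = pipeline(" appears in the second hunk header; arguments assumed
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    device=device,
)

generation_args = {
    "max_new_tokens": 70,  # raised from 50 by this commit
    "return_full_text": False,
    "temperature": 0.7,
    "top_k": 20,
    "do_sample": True,  # assumed; needed for temperature/top_k to take effect
    # ... the real dict may contain further keys outside the visible hunk
}


def clean_tweet(tweet):
    # Placeholder for the real cleaning logic defined earlier in app.py.
    return tweet.strip()


def generate_tweet(instruction):
    # Hypothetical body: build a prompt from the user's idea, run the pipeline,
    # and post-process the output before it reaches the output textbox.
    prompt = f"Instruction: {instruction}\nTweet:"
    output = pipe(prompt, **generation_args)
    return clean_tweet(output[0]["generated_text"])

Taken together, the switch from torch.float16 (previously commented "Use FP16 for GPU") to torch.float32 and the larger max_new_tokens budget read like an adjustment for CPU-backed Space hardware and slightly longer tweets, though the commit message itself does not say so.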