sanjanatule committed
Commit 7669e0a · 1 Parent(s): d3369cb

Update app.py

Files changed (1)
  1. app.py +20 -22
app.py CHANGED
@@ -3,29 +3,18 @@ import torch
from torch import nn
import lightning.pytorch as pl
from torch.nn import functional as F
- from utils import GPTLM
+ from utils import GPTLM,encode,decode

newmodel = GPTLM.load_from_checkpoint('shakespeare_gpt.pth')

- chars = ['\n', ' ', '!', '$', '&', "'", ',', '-', '.', '3', ':', ';', '?', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z']
- vocab_size = len(chars)
- # create a mapping from characters to integers
- stoi = { ch:i for i,ch in enumerate(chars) }
- itos = { i:ch for i,ch in enumerate(chars) }
-
- encode = lambda s: [stoi[c] for c in s] # encoder: take a string, output a list of integers
- decode = lambda l: ''.join([itos[i] for i in l]) # decoder: take a list of integers, output a string
-
-
def generate_dialogue(character_dropdown):
-
-
    if character_dropdown == "NONE":
        context = torch.zeros((1, 1), dtype=torch.long)
        return decode(newmodel.model.generate(context, max_new_tokens=100)[0].tolist())
    else:
        context = torch.tensor([encode(character_dropdown)], dtype=torch.long)
        return decode(newmodel.model.generate(context, max_new_tokens=100)[0].tolist())
+


HTML_TEMPLATE = """
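
The hunk above removes the character-level tokenizer from app.py and instead imports encode and decode from utils alongside GPTLM. Judging from the deleted lines, the helpers presumably live in utils.py in roughly this form (a sketch only; the actual utils module, which also defines the GPTLM LightningModule, is not part of this commit):

# utils.py (sketch) -- assumed home of the helpers imported above.
# The vocabulary reproduces the 65-character list deleted from app.py:
# newline, space, eleven punctuation/digit characters, then A-Z and a-z.
chars = ['\n', ' ', '!', '$', '&', "'", ',', '-', '.', '3', ':', ';', '?'] \
        + [chr(c) for c in range(ord('A'), ord('Z') + 1)] \
        + [chr(c) for c in range(ord('a'), ord('z') + 1)]
vocab_size = len(chars)

# mapping from characters to integers and back
stoi = {ch: i for i, ch in enumerate(chars)}
itos = {i: ch for i, ch in enumerate(chars)}

encode = lambda s: [stoi[c] for c in s]            # string -> list of token ids
decode = lambda l: ''.join(itos[i] for i in l)     # list of token ids -> string

# e.g. decode(encode("ROMEO")) == "ROMEO"
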
@@ -124,29 +113,38 @@ with gr.Blocks(theme=gr.themes.Glass(),css=".gradio-container {background: url('
gr.Markdown("")
gr.Markdown("")

+ gr.Markdown("")
+ gr.Markdown("")
+ gr.Markdown("")
+ gr.Markdown("")
+ gr.Markdown("")
+ gr.Markdown("")
+
+ gr.Markdown("")
+ gr.Markdown("")
+ gr.Markdown("")
+ gr.Markdown("")
+
gr.Markdown("")
gr.Markdown("")
gr.Markdown("")
gr.Markdown("")


- with gr.Column():
+ with gr.Row(scale=1):
    character_dropdown = gr.Dropdown(
        label="Select a Character",
        choices=["NONE","ROMEO","JULIET","MENENIUS","ANTONIO"],
        value='Dream'
    )
-
-     inputs = [character_dropdown]
-
-     with gr.Column():
-         button = gr.Button("Generate")
-         button.click(generate_dialogue, inputs=inputs, outputs=outputs)
-
- with gr.Row():
    outputs = gr.Textbox(
        label="Generated Dialogue"
    )
+     inputs = [character_dropdown]
+
+     with gr.Column(scale=1):
+         button = gr.Button("Generate")
+         button.click(generate_dialogue, inputs=inputs, outputs=outputs)

if __name__ == "__main__":
    interface.launch(enable_queue=True)
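
The net effect of the second hunk is that the Generate button is now wired up only after both the dropdown and the output textbox exist (the old code passed outputs to button.click before outputs was defined), with the controls grouped in a single Row and the button in a nested Column. A condensed, self-contained sketch of the resulting wiring is below; the real app.py also injects HTML_TEMPLATE, a Glass theme, and background CSS into gr.Blocks, and its generate_dialogue calls the checkpointed GPT model rather than the stub used here:

import gradio as gr

def generate_dialogue(character_dropdown):
    # stand-in for the model-backed function in app.py, which decodes
    # newmodel.model.generate(...) into text
    return f"[dialogue generated for {character_dropdown}]"

with gr.Blocks() as interface:          # the real app passes theme= and css= here
    with gr.Row():                      # the commit writes gr.Row(scale=1)
        character_dropdown = gr.Dropdown(
            label="Select a Character",
            choices=["NONE", "ROMEO", "JULIET", "MENENIUS", "ANTONIO"],
            value="NONE",               # the commit keeps value='Dream'
        )
        outputs = gr.Textbox(label="Generated Dialogue")
        inputs = [character_dropdown]

        with gr.Column():               # the commit writes gr.Column(scale=1)
            button = gr.Button("Generate")
            # clicking runs generate_dialogue(selected character) and writes
            # the returned string into the textbox
            button.click(generate_dialogue, inputs=inputs, outputs=outputs)

if __name__ == "__main__":
    interface.launch()
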