JMalott committed on
Commit
f47807d
·
1 Parent(s): 77bd19b

Update min_dalle/min_dalle.py

Browse files
Files changed (1) hide show
  1. min_dalle/min_dalle.py +1 -6
min_dalle/min_dalle.py CHANGED
@@ -24,6 +24,7 @@ IMAGE_TOKEN_COUNT = 256
24
 
25
 
26
  class MinDalle:
 
27
  def __init__(
28
  self,
29
  models_root: str = 'pretrained',
@@ -66,7 +67,6 @@ class MinDalle:
66
  self.init_decoder()
67
  self.init_detokenizer()
68
 
69
- @st.cache
70
  def download_tokenizer(self):
71
  if self.is_verbose: print("downloading tokenizer params")
72
  suffix = '' if self.is_mega else '_mini'
@@ -76,27 +76,23 @@ class MinDalle:
76
  with open(self.vocab_path, 'wb') as f: f.write(vocab.content)
77
  with open(self.merges_path, 'wb') as f: f.write(merges.content)
78
 
79
- @st.cache
80
  def download_encoder(self):
81
  if self.is_verbose: print("downloading encoder params")
82
  suffix = '' if self.is_mega else '_mini'
83
  params = requests.get(MIN_DALLE_REPO + 'encoder{}.pt'.format(suffix))
84
  with open(self.encoder_params_path, 'wb') as f: f.write(params.content)
85
 
86
- @st.cache
87
  def download_decoder(self):
88
  if self.is_verbose: print("downloading decoder params")
89
  suffix = '' if self.is_mega else '_mini'
90
  params = requests.get(MIN_DALLE_REPO + 'decoder{}.pt'.format(suffix))
91
  with open(self.decoder_params_path, 'wb') as f: f.write(params.content)
92
 
93
- @st.cache
94
  def download_detokenizer(self):
95
  if self.is_verbose: print("downloading detokenizer params")
96
  params = requests.get(MIN_DALLE_REPO + 'detoker.pt')
97
  with open(self.detoker_params_path, 'wb') as f: f.write(params.content)
98
 
99
- @st.cache
100
  def init_tokenizer(self):
101
  is_downloaded = os.path.exists(self.vocab_path)
102
  is_downloaded &= os.path.exists(self.merges_path)
@@ -108,7 +104,6 @@ class MinDalle:
108
  merges = f.read().split("\n")[1:-1]
109
  self.tokenizer = TextTokenizer(vocab, merges)
110
 
111
- @st.cache
112
  def init_encoder(self):
113
  is_downloaded = os.path.exists(self.encoder_params_path)
114
  if not is_downloaded: self.download_encoder()
 
24
 
25
 
26
  class MinDalle:
27
+ @st.cache
28
  def __init__(
29
  self,
30
  models_root: str = 'pretrained',
 
67
  self.init_decoder()
68
  self.init_detokenizer()
69
 
 
70
  def download_tokenizer(self):
71
  if self.is_verbose: print("downloading tokenizer params")
72
  suffix = '' if self.is_mega else '_mini'
 
76
  with open(self.vocab_path, 'wb') as f: f.write(vocab.content)
77
  with open(self.merges_path, 'wb') as f: f.write(merges.content)
78
 
 
79
  def download_encoder(self):
80
  if self.is_verbose: print("downloading encoder params")
81
  suffix = '' if self.is_mega else '_mini'
82
  params = requests.get(MIN_DALLE_REPO + 'encoder{}.pt'.format(suffix))
83
  with open(self.encoder_params_path, 'wb') as f: f.write(params.content)
84
 
 
85
  def download_decoder(self):
86
  if self.is_verbose: print("downloading decoder params")
87
  suffix = '' if self.is_mega else '_mini'
88
  params = requests.get(MIN_DALLE_REPO + 'decoder{}.pt'.format(suffix))
89
  with open(self.decoder_params_path, 'wb') as f: f.write(params.content)
90
 
 
91
  def download_detokenizer(self):
92
  if self.is_verbose: print("downloading detokenizer params")
93
  params = requests.get(MIN_DALLE_REPO + 'detoker.pt')
94
  with open(self.detoker_params_path, 'wb') as f: f.write(params.content)
95
 
 
96
  def init_tokenizer(self):
97
  is_downloaded = os.path.exists(self.vocab_path)
98
  is_downloaded &= os.path.exists(self.merges_path)
 
104
  merges = f.read().split("\n")[1:-1]
105
  self.tokenizer = TextTokenizer(vocab, merges)
106
 
 
107
  def init_encoder(self):
108
  is_downloaded = os.path.exists(self.encoder_params_path)
109
  if not is_downloaded: self.download_encoder()