ChihChiu29 commited on
Commit
be5ce64
·
1 Parent(s): a8f6370
Files changed (1) hide show
  1. main.py +4 -5
main.py CHANGED
@@ -21,13 +21,12 @@ token_size_limit = None
21
  # T5 model can use "any" sequence length, but memory usage is O(L^2).
22
  # tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-small")
23
  # model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-small")
24
- # tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-base")
25
- # model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-base")
26
- tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-large")
27
- model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-large")
28
  token_size_limit = 512
29
 
30
-
31
  # Too large for 16GB
32
  # tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-xl")
33
  # model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-xl")
 
21
  # T5 model can use "any" sequence length, but memory usage is O(L^2).
22
  # tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-small")
23
  # model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-small")
24
+ tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-base")
25
+ model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-base")
26
+ # tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-large")
27
+ # model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-large")
28
  token_size_limit = 512
29
 
 
30
  # Too large for 16GB
31
  # tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-xl")
32
  # model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-xl")