toeknee432 committed on
Commit
4811c8f
·
1 Parent(s): b7653fa

Update pages/2_🆚_Extractive_vs_Abstractive.py

Browse files
pages/2_🆚_Extractive_vs_Abstractive.py CHANGED
@@ -25,10 +25,10 @@ def load_model():
25
  model_name = 'google/pegasus-billsum'
26
  torch_device = 'cuda' if torch.cuda.is_available() else 'cpu'
27
  #run using local model
28
- tokenizer = PegasusTokenizer.from_pretrained(model_name)
29
- model = PegasusForConditionalGeneration.from_pretrained(model_name, max_position_embeddings=2000).to(torch_device)
30
- #tokenizer = PegasusTokenizer.from_pretrained("local_pegasus-billsum_tokenizer", use_auth_token=True)
31
- #model = PegasusForConditionalGeneration.from_pretrained("local_pegasus-billsum_tokenizer_model", max_position_embeddings=2000, use_auth_token=True).to(torch_device)
32
  #tokenizer = PegasusTokenizer.from_pretrained("local_pegasus-billsum_tokenizer")
33
  #model = PegasusForConditionalGeneration.from_pretrained("local_pegasus-billsum_tokenizer_model", max_position_embeddings=2000).to(torch_device)
34
  return model,tokenizer
 
25
  model_name = 'google/pegasus-billsum'
26
  torch_device = 'cuda' if torch.cuda.is_available() else 'cpu'
27
  #run using local model
28
+ #tokenizer = PegasusTokenizer.from_pretrained(model_name)
29
+ #model = PegasusForConditionalGeneration.from_pretrained(model_name, max_position_embeddings=2000).to(torch_device)
30
+ tokenizer = PegasusTokenizer.from_pretrained("local_pegasus-billsum_tokenizer")
31
+ model = PegasusForConditionalGeneration.from_pretrained("local_pegasus-billsum_tokenizer_model", max_position_embeddings=2000).to(torch_device)
32
  #tokenizer = PegasusTokenizer.from_pretrained("local_pegasus-billsum_tokenizer")
33
  #model = PegasusForConditionalGeneration.from_pretrained("local_pegasus-billsum_tokenizer_model", max_position_embeddings=2000).to(torch_device)
34
  return model,tokenizer