neerajkalyank committed
Commit d021ecf · verified · 1 Parent(s): c1aacae

Update app.py

Files changed (1):
  1. app.py +3 -7
app.py CHANGED
@@ -3,19 +3,15 @@ import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer
 from joblib import Memory
 import datetime
-import os
-
-# Enable offline mode for Hugging Face transformers
-os.environ["HF_HUB_OFFLINE"] = "1"
 
 # Initialize cache
 cache_dir = "./cache"
 memory = Memory(cache_dir, verbose=0)
 
-# Load pre-trained model and tokenizer with local_files_only=True
+# Load pre-trained model and tokenizer (allow online download)
 model_name = "distilgpt2"
-tokenizer = AutoTokenizer.from_pretrained(model_name, local_files_only=True)
-model = AutoModelForCausalLM.from_pretrained(model_name, local_files_only=True)
+tokenizer = AutoTokenizer.from_pretrained(model_name)
+model = AutoModelForCausalLM.from_pretrained(model_name)
 
 # Set pad_token_id to eos_token_id to avoid warnings
 tokenizer.pad_token = tokenizer.eos_token
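
Note: this commit removes every offline-only setting from the loading path, both the HF_HUB_OFFLINE=1 environment variable and the local_files_only=True arguments, so the app will download distilgpt2 from the Hub on first run. A minimal sketch of how the removed behavior could be restored if offline operation is ever needed again (this is not part of the commit, and it assumes the model files are already in the local Hugging Face cache):

import os

# Option 1: put the Hub client in offline mode for the whole process.
# Must be set before transformers attempts any download.
os.environ["HF_HUB_OFFLINE"] = "1"

from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "distilgpt2"

# Option 2: tell each loader to use only files already cached locally.
tokenizer = AutoTokenizer.from_pretrained(model_name, local_files_only=True)
model = AutoModelForCausalLM.from_pretrained(model_name, local_files_only=True)

Either option alone is enough; both fail with an error rather than downloading if the cached files are missing.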