doublelotus committed on
Commit
59dd6f3
·
1 Parent(s): a4acab9

test cache

Browse files
Files changed (1) hide show
  1. main.py +2 -0
main.py CHANGED
@@ -8,6 +8,7 @@ from segment_anything import sam_model_registry, SamAutomaticMaskGenerator
8
  from PIL import Image
9
  import zipfile
10
  from transformers import pipeline
 
11
 
12
  app = Flask(__name__)
13
  CORS(app)
@@ -15,6 +16,7 @@ CORS(app)
15
  cudaOrNah = "cuda" if torch.cuda.is_available() else "cpu"
16
  print(cudaOrNah)
17
 
 
18
  # Global model setup
19
  # running out of memory adjusted
20
  # checkpoint = "sam_vit_h_4b8939.pth"
 
8
  from PIL import Image
9
  import zipfile
10
  from transformers import pipeline
11
+ import os
12
 
13
  app = Flask(__name__)
14
  CORS(app)
 
16
  cudaOrNah = "cuda" if torch.cuda.is_available() else "cpu"
17
  print(cudaOrNah)
18
 
19
+ os.environ['TRANSFORMERS_CACHE'] = '/tmp/transformers_cache'
20
  # Global model setup
21
  # running out of memory adjusted
22
  # checkpoint = "sam_vit_h_4b8939.pth"