removing force download from model loading
backend/main.py (+3 -4)
@@ -129,12 +129,11 @@ static_files = {
     },
 }
 device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
-processor = AutoProcessor.from_pretrained("facebook/seamless-m4t-v2-large", force_download=True)
-
+# processor = AutoProcessor.from_pretrained("facebook/seamless-m4t-v2-large", force_download=True)
+processor = AutoProcessor.from_pretrained("facebook/seamless-m4t-v2-large")
 
 # PM - hardcoding temporarily as my GPU doesnt have enough vram
-
-model = SeamlessM4Tv2Model.from_pretrained("facebook/seamless-m4t-v2-large", force_download=True).to(device)
+model = SeamlessM4Tv2Model.from_pretrained("facebook/seamless-m4t-v2-large").to(device)
 
 
 bytes_data = bytearray()
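With force_download=True removed, from_pretrained resolves facebook/seamless-m4t-v2-large from the local Hugging Face cache (by default under ~/.cache/huggingface/hub) and only downloads the multi-gigabyte checkpoint on a cache miss, instead of re-fetching it on every startup. Below is a minimal sketch of loading and exercising the processor/model pair after this change; the text-to-text translation call at the end is illustrative only and is not part of this commit.

import torch
from transformers import AutoProcessor, SeamlessM4Tv2Model

device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

# Without force_download=True these calls reuse the locally cached weights when present.
processor = AutoProcessor.from_pretrained("facebook/seamless-m4t-v2-large")
model = SeamlessM4Tv2Model.from_pretrained("facebook/seamless-m4t-v2-large").to(device)

# Illustrative text-to-text translation (eng -> fra); assumed usage, not from this diff.
text_inputs = processor(text="Hello, world", src_lang="eng", return_tensors="pt").to(device)
output_tokens = model.generate(**text_inputs, tgt_lang="fra", generate_speech=False)
print(processor.decode(output_tokens[0].tolist()[0], skip_special_tokens=True))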