Update main.py
main.py CHANGED
@@ -90,8 +90,9 @@ class QuranRecitationComparer:
         try:
             if token:
                 print(f"Loading model {model_name} with token...")
-                self.processor = AutoProcessor.from_pretrained(model_name, token=token)
-                self.model = AutoModelForCTC.from_pretrained(model_name, token=token)
+                # Use 'use_auth_token' instead of the deprecated 'token' parameter
+                self.processor = AutoProcessor.from_pretrained(model_name, use_auth_token=token)
+                self.model = AutoModelForCTC.from_pretrained(model_name, use_auth_token=token)
             else:
                 print(f"Loading model {model_name} without token...")
                 self.processor = AutoProcessor.from_pretrained(model_name)
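The hunk above only changes the Hugging Face authentication keyword used when loading a gated model. As a rough sketch of how the loading path reads after this change (the helper name load_model_and_processor is hypothetical, and which keyword from_pretrained accepts, use_auth_token or token, depends on the installed transformers version):

from transformers import AutoProcessor, AutoModelForCTC

def load_model_and_processor(model_name, hf_token=None, device="cpu"):
    # Hypothetical helper mirroring the __init__ logic in the diff above.
    # Older transformers releases accept use_auth_token=...; newer ones
    # deprecate it in favour of token=...
    auth_kwargs = {"use_auth_token": hf_token} if hf_token else {}
    processor = AutoProcessor.from_pretrained(model_name, **auth_kwargs)
    model = AutoModelForCTC.from_pretrained(model_name, **auth_kwargs)
    model = model.to(device)
    model.eval()
    # Set once on the config so every forward pass returns hidden states
    model.config.output_hidden_states = True
    return processor, model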
@@ -99,7 +100,7 @@ class QuranRecitationComparer:
 
             self.model = self.model.to(self.device)
             self.model.eval()
-            #
+            # Ensure that hidden states are returned by default
             self.model.config.output_hidden_states = True
             print("Model loaded successfully!")
         except Exception as e:
@@ -142,7 +143,7 @@ class QuranRecitationComparer:
             ).input_values.to(self.device)
 
             with torch.no_grad():
-                # Call the model without passing output_hidden_states
+                # Call the model without explicitly passing output_hidden_states
                 outputs = self.model(inputs)
 
             hidden_states = outputs.hidden_states[-1]
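Because output_hidden_states is set on the model config at load time, the forward call in this hunk no longer needs the flag per call: transformers models fall back to the config value when the argument is not passed. A minimal sketch of the extraction step under that assumption (the function name extract_embeddings is hypothetical; waveform is a 1-D float array at the processor's 16 kHz sampling rate):

import torch

def extract_embeddings(processor, model, waveform, device):
    # Tokenize/normalize the raw audio into model input values
    inputs = processor(
        waveform,
        sampling_rate=16000,
        return_tensors="pt",
    ).input_values.to(device)

    with torch.no_grad():
        # No output_hidden_states=True needed; the config flag set at load time applies
        outputs = model(inputs)

    # Last encoder layer, shape (batch, time_frames, hidden_size)
    return outputs.hidden_states[-1]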
@@ -278,10 +279,7 @@ async def startup_event():
 @app.get("/")
 async def root():
     """Root endpoint to check if the API is running."""
-    if comparer:
-        status = "active"
-    else:
-        status = "model not loaded"
+    status = "active" if comparer else "model not loaded"
     return {"message": "Quran Recitation Comparer API is running", "status": status}
 
 @app.post("/compare", response_model=ComparisonResult)
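The last hunk collapses the four-line if/else into a conditional expression; status simply reports whether the global comparer was initialised during startup. A quick way to check it against a running instance (the base URL is a placeholder for wherever the Space is served):

import requests

resp = requests.get("http://localhost:8000/")
resp.raise_for_status()
print(resp.json())
# {"message": "Quran Recitation Comparer API is running", "status": "active"}
# or "model not loaded" if the startup event failed to build the comparer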