KabeerAmjad
committed on
Commit
•
3e8dce3
1
Parent(s):
d9d7936
Update app.py
Browse files
app.py
CHANGED
@@ -54,33 +54,41 @@ except Exception as e:
|
|
54 |
# Function to predict image class
def predict(image):
    """Classify an uploaded image with the module-level model.

    Parameters
    ----------
    image : PIL.Image.Image
        Image handed in by the Gradio interface.

    Returns
    -------
    str
        ``"Predicted class: <label>"`` on success, or an error message
        describing the failure.
    """
    try:
        # Convert the uploaded file to a 3-channel RGB PIL image
        input_image = image.convert("RGB")

        # Preprocess the image with the module-level transform pipeline
        input_tensor = preprocess(input_image)
        input_batch = input_tensor.unsqueeze(0)  # Add batch dimension

        # Check if a GPU is available and move the input and model to GPU
        if torch.cuda.is_available():
            input_batch = input_batch.to('cuda')
            model.to('cuda')
        else:
            print("GPU not available, using CPU.")

        # Perform inference without gradient tracking
        with torch.no_grad():
            output = model(input_batch)

        # Get the predicted class with the highest score
        _, predicted_idx = torch.max(output, 1)
        # NOTE(review): labels appears to be keyed by stringified index — confirm
        predicted_class = labels[str(predicted_idx.item())]

        return f"Predicted class: {predicted_class}"

    except Exception as e:
        print(f"Error during prediction: {e}")
        # FIX: original returned an unterminated string literal
        # ('"An error occurred during prediction' with no closing quote),
        # which is a syntax error; also surface the exception detail.
        return f"An error occurred during prediction: {e}"
|
84 |
|
85 |
# Set up the Gradio interface
|
86 |
iface = gr.Interface(
|
|
|
54 |
# Function to predict image class
def predict(image):
    """Run the classifier on *image* and report the top-1 label.

    Emits progress via ``print`` at each stage (conversion, preprocessing,
    device selection, inference, decoding) to aid debugging in the Space logs.

    Parameters
    ----------
    image : PIL.Image.Image
        Image supplied by the Gradio interface.

    Returns
    -------
    str
        ``"Predicted class: <label>"`` on success, otherwise an error
        message that includes the caught exception.
    """
    try:
        print("Starting prediction...")

        # Normalise the upload to a 3-channel RGB PIL image.
        rgb = image.convert("RGB")
        print(f"Image converted to RGB: {rgb.size}")

        # Apply the module-level transform pipeline and add a batch axis.
        batch = preprocess(rgb).unsqueeze(0)
        print(f"Input tensor shape after unsqueeze: {batch.shape}")

        # Prefer the GPU when one is present; move input and model together.
        if torch.cuda.is_available():
            batch = batch.to('cuda')
            model.to('cuda')
            print("Using GPU for inference.")
        else:
            print("GPU not available, using CPU.")

        # Forward pass with gradient bookkeeping disabled.
        with torch.no_grad():
            output = model(batch)
        print(f"Inference output shape: {output.shape}")

        # argmax over the class dimension picks the winning label index.
        _, top_idx = torch.max(output, 1)
        # NOTE(review): labels appears to be keyed by stringified index — confirm
        winner = labels[str(top_idx.item())]
        print(f"Predicted class index: {top_idx.item()}")
        print(f"Predicted class: {winner}")

        return f"Predicted class: {winner}"

    except Exception as e:
        print(f"Error during prediction: {e}")
        return f"An error occurred during prediction: {e}"
|
92 |
|
93 |
# Set up the Gradio interface
|
94 |
iface = gr.Interface(
|