sayyedAhmed committed
Commit 265cf85 · 1 Parent(s): eae485e
app
app.py
ADDED
@@ -0,0 +1,87 @@
import io  # in-memory buffer for the downloadable plot
import streamlit as st
import torch
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score
import joblib
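
# ---------------------------------------------------------------------------
# NOTE: LSTMPredictor is referenced below but not defined in this file; the
# repository presumably imports it from its training code. The class here is
# only a minimal sketch reconstructed from the constructor arguments and the
# way the model is called further down; replace it with the actual class (or
# import it) so that load_state_dict() matches the saved checkpoint.
# ---------------------------------------------------------------------------
import torch.nn as nn

class LSTMPredictor(nn.Module):
    def __init__(self, input_dim, hidden_dim, output_dim, forecast_horizon,
                 n_layers, dropout):
        super().__init__()
        self.forecast_horizon = forecast_horizon
        self.output_dim = output_dim
        self.lstm = nn.LSTM(input_dim, hidden_dim, num_layers=n_layers,
                            batch_first=True, dropout=dropout)
        # One output per forecasted month.
        self.fc = nn.Linear(hidden_dim, forecast_horizon * output_dim)

    def forward(self, x):
        # Accept 2-D input (batch, features) by treating each row as a
        # length-1 sequence; 3-D input (batch, seq_len, features) passes through.
        if x.dim() == 2:
            x = x.unsqueeze(1)
        lstm_out, _ = self.lstm(x)
        # Forecast from the hidden state of the last time step.
        return self.fc(lstm_out[:, -1, :])
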
# Load the model and scaler
def load_model(model_path, scaler_path):
    # torch.load and joblib.load both accept file-like objects, so the
    # Streamlit UploadedFile handles can be passed in directly.
    model_state = torch.load(model_path)
    model = LSTMPredictor(
        input_dim=model_state['model_architecture']['input_dim'],
        hidden_dim=model_state['model_architecture']['hidden_dim'],
        output_dim=model_state['model_architecture']['output_dim'],
        forecast_horizon=model_state['model_architecture']['forecast_horizon'],
        n_layers=model_state['model_architecture']['n_layers'],
        dropout=model_state['model_architecture']['dropout']
    )
    model.load_state_dict(model_state['model_state_dict'])
    scaler = joblib.load(scaler_path)
    return model, scaler
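
# The checkpoint consumed above is assumed to have been produced by a separate
# training script (not part of this commit) with something along the lines of:
#
#   torch.save({
#       'model_state_dict': model.state_dict(),
#       'model_architecture': {
#           'input_dim': ..., 'hidden_dim': ..., 'output_dim': ...,
#           'forecast_horizon': ..., 'n_layers': ..., 'dropout': ...,
#       },
#   }, 'lstm_model.pth')
#
# Only the two top-level keys read by load_model() are required; the exact
# values depend on how the model was trained.
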

# Prepare Streamlit interface
st.title("Crisis Severity Prediction Model Evaluation")
st.sidebar.title("Model Evaluation Dashboard")

# Upload model and scaler files
model_file = st.sidebar.file_uploader("Upload Trained Model", type=["pth", "pt"])
scaler_file = st.sidebar.file_uploader("Upload Scaler File", type=["pkl"])

if model_file and scaler_file:
    # Load model and scaler
    model, scaler = load_model(model_file, scaler_file)

    # Example of how to prepare test data (adjust for actual data)
    X_test = np.array([[...]])  # Test data input
    y_test = np.array([[...]])  # Actual target values
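
    # One hypothetical way to load a real evaluation set (the file name and
    # column names below are illustrative assumptions, not part of this repo):
    #
    #   import pandas as pd
    #   df = pd.read_csv("test_data.csv")
    #   target_cols = ["severity_month_1", "severity_month_2", "severity_month_3"]
    #   X_test = df.drop(columns=target_cols).to_numpy()
    #   y_test = df[target_cols].to_numpy()
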
    # Scale the test data using the loaded scaler
    scaled_X_test = scaler.transform(X_test)

    # Convert to tensor
    X_test_tensor = torch.FloatTensor(scaled_X_test)

    # Get predictions
    model.eval()
    with torch.no_grad():
        predictions = model(X_test_tensor)

    # Evaluate and display metrics (using first 3 months for example)
    y_true = y_test
    y_pred = predictions.numpy()

    metrics = {}
    for month in range(3):  # Assuming forecast for 3 months
        month_metrics = {
            'mse': mean_squared_error(y_true[:, month], y_pred[:, month]),
            'rmse': np.sqrt(mean_squared_error(y_true[:, month], y_pred[:, month])),
            'mae': mean_absolute_error(y_true[:, month], y_pred[:, month]),
            'r2': r2_score(y_true[:, month], y_pred[:, month])
        }
        metrics[f'month_{month+1}'] = month_metrics

    # Display metrics
    st.subheader("Model Performance Metrics:")
    for month, month_metrics in metrics.items():
        st.write(f"{month.upper()}:")
        for metric_name, metric_value in month_metrics.items():
            st.write(f"{metric_name.upper()}: {metric_value:.4f}")

    # Visualization (actual vs predicted)
    fig, ax = plt.subplots(figsize=(10, 6))
    for month in range(3):
        ax.scatter(y_true[:, month], y_pred[:, month], alpha=0.5, label=f'Month {month+1}')
    ax.plot([0, 5], [0, 5], 'r--')  # reference line for a perfect prediction
    ax.set_xlabel('Actual Severity Index')
    ax.set_ylabel('Predicted Severity Index')
    ax.set_title('Actual vs Predicted')
    ax.legend()

    st.pyplot(fig)

+
# Option to download plot
|
85 |
+
st.download_button("Download Evaluation Plot", "evaluation_plot.png")
|
86 |
+
else:
|
87 |
+
st.warning("Please upload both the trained model and scaler files.")
|