import streamlit as st
import pandas as pd
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split
from keras.models import Sequential
from keras.layers import InputLayer, Dense
from sklearn.datasets import make_circles, make_classification, make_moons, make_blobs
from mlxtend.plotting import plot_decision_regions
from keras.optimizers import SGD
import time
# Custom background and styling
st.markdown(
    """
    <style>
    .main {
        background: linear-gradient(to right, #f0f4f8, #d9e2ec);
    }
    </style>
    """,
    unsafe_allow_html=True
)
# App Title
st.title("🧠 NeuroVision Lab - Interactive Neural Network Playground")
# Sidebar: Dataset selection
st.sidebar.header("🎲 Generate Synthetic Data")
data_type = st.sidebar.selectbox("Select Dataset Type", ["make_circles", "make_classification", "make_moons", "make_blobs"])
# make_circles requires factor < 1, so the slider is capped just below 1.0
factor = st.sidebar.slider("Circle Factor (for make_circles)", 0.1, 0.99, 0.2)
noise = st.sidebar.slider("Add Noise", 0.0, 1.0, 0.1)
samples = st.sidebar.slider("Total Samples", 1000, 10000, 10000, step=100)
generate_scatter = st.sidebar.button("Create Dataset")
# Initialize session state
if 'X' not in st.session_state:
    st.session_state['X'] = None
if 'y' not in st.session_state:
    st.session_state['y'] = None
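# Streamlit re-runs this script from top to bottom on every widget interaction,
# so the generated dataset is kept in st.session_state to survive those reruns.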
# Function to generate data
def generate_data(data_type, samples, noise, factor):
    if data_type == "make_circles":
        st.session_state['X'], st.session_state['y'] = make_circles(n_samples=samples, noise=noise, factor=factor, random_state=42)
    elif data_type == "make_classification":
        st.session_state['X'], st.session_state['y'] = make_classification(n_samples=samples, n_features=2, n_informative=2,
                                                                            n_redundant=0, n_clusters_per_class=1, flip_y=noise, random_state=42)
    elif data_type == "make_moons":
        st.session_state['X'], st.session_state['y'] = make_moons(n_samples=samples, noise=noise, random_state=42)
    elif data_type == "make_blobs":
        st.session_state['X'], st.session_state['y'] = make_blobs(n_samples=samples, centers=2, cluster_std=1.0, random_state=42)
# Scatterplot of generated data
if generate_scatter:
    generate_data(data_type, samples, noise, factor)
    if st.session_state['X'] is not None and st.session_state['y'] is not None:
        df = pd.DataFrame(st.session_state['X'], columns=["x1", "x2"])
        df["label"] = st.session_state['y']
        st.subheader(f"🧩 Visualizing: {data_type}")
        fig1, ax1 = plt.subplots()
        sns.scatterplot(data=df, x="x1", y="x2", hue="label", palette="viridis", ax=ax1)
        st.pyplot(fig1)
    else:
        st.warning("Data generation unsuccessful. Please check your parameters.")
# Sidebar: Training Configuration
st.sidebar.header("⚙️ Model Configuration")
test_percent = st.sidebar.slider("Test Set (%)", 10, 90, 20)
test_size = test_percent / 100
learning_rate = st.sidebar.selectbox("Choose Learning Rate", [0.0001, 0.001, 0.01, 0.1])
act_fun = st.sidebar.selectbox("Activation Function", ["sigmoid", "tanh", "relu"])
batch_size = st.sidebar.slider("Batch Size", 1, 10000, 6400)
epochs = st.sidebar.slider("Training Epochs", 1, 1000, 600)
# Train Model and Plot Decision Surface
if st.sidebar.button("🧮 Train Model & Show Decision Surface"):
    if st.session_state['X'] is None or st.session_state['y'] is None:
        st.error("⚠️ Please generate a dataset first.")
    else:
        # Preprocessing
        x_train, x_test, y_train, y_test = train_test_split(st.session_state['X'], st.session_state['y'], test_size=test_size, stratify=st.session_state['y'], random_state=1)
        scaler = StandardScaler()
        x_train = scaler.fit_transform(x_train)
        x_test = scaler.transform(x_test)

        # Build model
        model = Sequential()
        model.add(InputLayer(input_shape=(2,)))
        for units in [4, 2, 2]:
            model.add(Dense(units, activation=act_fun))
        model.add(Dense(1, activation="sigmoid"))
        sgd = SGD(learning_rate=learning_rate)
        model.compile(optimizer=sgd, loss="binary_crossentropy", metrics=["accuracy"])
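        # Architecture recap: 2-D input -> Dense(4) -> Dense(2) -> Dense(2) hidden layers with the
        # chosen activation, then a single sigmoid unit for binary classification, trained with
        # plain SGD at the selected learning rate.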
        # Show training progress
        st.subheader("Model Training Progress")
        progress_bar = st.progress(0)
        progress_pct = st.empty()
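        # Progress callback -- a minimal sketch (uses only the standard keras.callbacks.Callback
        # API) so the bar tracks the actual training loop instead of animating after fit() returns.
        from keras.callbacks import Callback

        class StreamlitProgress(Callback):
            def on_epoch_end(self, epoch, logs=None):
                pct = int((epoch + 1) / epochs * 100)
                progress_bar.progress(pct)
                progress_pct.write(f"{pct}%")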
        history = model.fit(x_train, y_train, batch_size=batch_size, epochs=epochs, verbose=0,
                            validation_split=0.2, callbacks=[StreamlitProgress()])
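        # mlxtend's plot_decision_regions expects clf.predict() to return 1-D integer class labels,
        # while the Keras model outputs (n, 1) sigmoid probabilities. This small helper (added here
        # for illustration, not part of the original app) thresholds the probabilities at 0.5.
        class KerasBinaryWrapper:
            def __init__(self, net):
                self.net = net

            def predict(self, X):
                return (self.net.predict(X, verbose=0) > 0.5).astype(int).ravel()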
        # Decision surface visualization
        st.subheader("🧠 Neural Network Decision Boundary")
        fig2, ax2 = plt.subplots()
        plot_decision_regions(x_train, y_train.astype(int), clf=KerasBinaryWrapper(model), legend=2, ax=ax2)
        st.pyplot(fig2)
        st.session_state['history'] = history
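        # Hold-out check -- a minimal sketch using the otherwise unused scaled test split from above;
        # model.evaluate returns [loss, accuracy] because the model was compiled with metrics=["accuracy"].
        test_loss, test_acc = model.evaluate(x_test, y_test, verbose=0)
        st.write(f"Test accuracy: {test_acc:.3f} (test loss: {test_loss:.3f})")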
# Show Loss Curve
if st.sidebar.button("Display Loss Curve"):
    if 'history' in st.session_state:
        st.subheader("Training vs Validation Loss")
        history = st.session_state['history']
        fig3, ax3 = plt.subplots()
        ax3.plot(history.history['loss'], label='Train Loss')
        ax3.plot(history.history['val_loss'], label='Val Loss')
        ax3.set_xlabel("Epochs")
        ax3.set_ylabel("Loss")
        ax3.set_title("Loss Progress Over Time")
        ax3.legend()
        st.pyplot(fig3)
    else:
        st.warning("⏳ Train the model to visualize the loss curve.")