# TuNNe / main.py
# (renamed from app.py; commit eaadba8)
import streamlit as st
import tensorflow as tf
from tensorflow.keras.datasets import mnist
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Flatten
from tensorflow.keras.optimizers import Adam
import matplotlib.pyplot as plt
# --- Data: MNIST digits, with pixel intensities rescaled to [0, 1] ---
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train / 255.0
x_test = x_test / 255.0

# --- UI: page title plus sidebar controls for the training hyperparameters ---
st.title("MNIST Neural Network Training")
st.sidebar.header("Model Parameters")
# Sliders: (label, min, max, default)
learning_rate = st.sidebar.slider("Learning Rate", 0.0001, 0.01, 0.001)
batch_size = st.sidebar.slider("Batch Size", 16, 128, 32)
epochs = st.sidebar.slider("Epochs", 1, 20, 5)
# Model building function
def build_model(learning_rate):
model = Sequential([
Flatten(input_shape=(28, 28)),
Dense(128, activation='relu'),
Dense(10, activation='softmax')
])
model.compile(optimizer=Adam(learning_rate=learning_rate),
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
return model
# Train the model
if st.sidebar.button("Train Model"):
model = build_model(learning_rate)
history = model.fit(x_train, y_train, epochs=epochs, batch_size=batch_size, validation_data=(x_test, y_test))
# Plot training & validation accuracy values
fig, ax = plt.subplots()
ax.plot(history.history['accuracy'])
ax.plot(history.history['val_accuracy'])
ax.set_title('Model accuracy')
ax.set_ylabel('Accuracy')
ax.set_xlabel('Epoch')
ax.legend(['Train', 'Test'], loc='upper left')
st.pyplot(fig)
# Plot training & validation loss values
fig, ax = plt.subplots()
ax.plot(history.history['loss'])
ax.plot(history.history['val_loss'])
ax.set_title('Model loss')
ax.set_ylabel('Loss')
ax.set_xlabel('Epoch')
ax.legend(['Train', 'Test'], loc='upper left')
st.pyplot(fig)
# Evaluate the model
loss, accuracy = model.evaluate(x_test, y_test, verbose=2)
st.write(f"Test Accuracy: {accuracy:.4f}")
st.write(f"Test Loss: {loss:.4f}")