Commit cc6dcfa
Committed by eaglelandsonce
Parent(s): 4417123
Create Prompt2Example.py
Files changed: pages/Prompt2Example.py (+64, -0)
pages/Prompt2Example.py (ADDED)
@@ -0,0 +1,64 @@
import streamlit as st
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

# Generate synthetic data: points inside the unit circle are labeled 1, the rest 0
def generate_data(num_points=1000):
    np.random.seed(0)
    X = np.random.randn(num_points, 2)
    y = (X[:, 0]**2 + X[:, 1]**2 < 1).astype(int)
    return X, y

# Create the model
def create_model(input_shape, learning_rate, activation, hidden_layers):
    model = tf.keras.Sequential()
    model.add(tf.keras.layers.InputLayer(input_shape=input_shape))
    for neurons in hidden_layers:
        model.add(tf.keras.layers.Dense(neurons, activation=activation))
    model.add(tf.keras.layers.Dense(1, activation='sigmoid'))

    model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=learning_rate),
                  loss='binary_crossentropy',
                  metrics=['accuracy'])
    return model

# Plot the decision boundary onto the current matplotlib figure
def plot_decision_boundary(model, X, y):
    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.1),
                         np.arange(y_min, y_max, 0.1))
    grid = np.c_[xx.ravel(), yy.ravel()]
    probs = model.predict(grid).reshape(xx.shape)

    plt.contourf(xx, yy, probs, alpha=0.8)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolor='k', marker='o')

def main():
    st.title("Interactive Neural Network Training")

    # Sidebar inputs
    learning_rate = st.sidebar.slider("Learning rate", 0.001, 0.1, 0.03)
    activation = st.sidebar.selectbox("Activation function", ["relu", "tanh", "sigmoid"])
    num_hidden_layers = st.sidebar.slider("Number of hidden layers", 1, 5, 2)
    neurons_per_layer = st.sidebar.slider("Neurons per layer", 1, 10, 4)
    batch_size = st.sidebar.slider("Batch size", 1, 100, 10)
    num_epochs = st.sidebar.slider("Number of epochs", 1, 1000, 100)

    hidden_layers = [neurons_per_layer] * num_hidden_layers

    X, y = generate_data()

    model = create_model(input_shape=(2,), learning_rate=learning_rate, activation=activation, hidden_layers=hidden_layers)
    model.fit(X, y, epochs=num_epochs, batch_size=batch_size, validation_split=0.5, verbose=0)

    st.write("Training complete")

    # Draw the decision boundary on a single figure and render it exactly once
    fig, ax = plt.subplots()
    plot_decision_boundary(model, X, y)
    st.pyplot(fig)

if __name__ == "__main__":
    main()
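
Note (not part of the commit): since the file lives under pages/, it is presumably picked up by Streamlit's multipage mechanism when the Space's main script is run with streamlit run. The snippet below is a minimal sketch of the same data and model recipe exercised outside Streamlit as a quick local sanity check; it mirrors the sidebar defaults in the diff (two hidden layers of 4 ReLU units, learning rate 0.03, batch size 10, 100 epochs), and all other names and values here are illustrative rather than taken from the committed file.

    # Illustrative sanity check of the app's recipe, without Streamlit (assumes
    # only NumPy and TensorFlow are installed); not part of pages/Prompt2Example.py.
    import numpy as np
    import tensorflow as tf

    np.random.seed(0)
    X = np.random.randn(1000, 2)
    y = (X[:, 0]**2 + X[:, 1]**2 < 1).astype(int)  # label 1 inside the unit circle

    model = tf.keras.Sequential([
        tf.keras.layers.InputLayer(input_shape=(2,)),
        tf.keras.layers.Dense(4, activation="relu"),
        tf.keras.layers.Dense(4, activation="relu"),
        tf.keras.layers.Dense(1, activation="sigmoid"),
    ])
    model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.03),
                  loss="binary_crossentropy", metrics=["accuracy"])
    model.fit(X, y, epochs=100, batch_size=10, validation_split=0.5, verbose=0)

    loss, acc = model.evaluate(X, y, verbose=0)
    print(f"accuracy on the synthetic set: {acc:.3f}")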