Suhas-G committed on
Commit
f100013
·
0 Parent(s):

ChatGPT generated app with many attempts

Browse files
Files changed (1) hide show
  1. app.py +76 -0
app.py ADDED
@@ -0,0 +1,76 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import plotly.graph_objects as go
3
+ import numpy as np
4
+ import torch
5
+ from torch.optim import SGD
6
+ import torch.optim.lr_scheduler as lr_schedulers
7
+
8
# Names of scheduler classes in torch.optim.lr_scheduler that the UI offers;
# each is resolved dynamically with getattr() in get_lr_schedule.
schedulers = [
    "ConstantLR",
    "LinearLR",
    "ExponentialLR",
    "StepLR",
    "MultiStepLR",
    "CosineAnnealingLR",
    "CyclicLR",
    "OneCycleLR",
    "ReduceLROnPlateau",
    "CosineAnnealingWarmRestarts",
]
14
+
15
# Helper that renders a single-series line chart with Plotly.
def create_line_plot(x, y, title="Line Plot"):
    """Return a Plotly figure drawing y against x as one line trace.

    The trace is named after *title*, and the axes are labeled for the
    LR-schedule use case ("Steps" vs. "Learning Rate").
    """
    figure = go.Figure(data=[go.Scatter(x=x, y=y, mode="lines", name=title)])
    figure.update_layout(title=title, xaxis_title="Steps", yaxis_title="Learning Rate")
    return figure
21
+
22
# Generic function to get the learning-rate schedule for any torch scheduler.
def get_lr_schedule(scheduler_name, initial_lr=0.1, total_steps=100, scheduler_kwargs=None):
    """Simulate a torch LR scheduler and return its per-step learning rates.

    Parameters
    ----------
    scheduler_name : str
        Name of a class in ``torch.optim.lr_scheduler`` (e.g. "StepLR").
    initial_lr : float
        Base learning rate given to the dummy optimizer.
    total_steps : int
        Number of scheduler steps to simulate.
    scheduler_kwargs : dict | None
        Extra keyword arguments forwarded to the scheduler constructor.

    Returns
    -------
    list[float]
        The learning rate observed at each of the ``total_steps`` steps.

    Raises
    ------
    AttributeError
        If ``scheduler_name`` is not defined in ``torch.optim.lr_scheduler``.
    """
    if scheduler_kwargs is None:
        scheduler_kwargs = {}

    # Dummy optimizer with a single throwaway parameter; only its LR matters.
    optimizer = SGD([torch.nn.Parameter(torch.zeros(1))], lr=initial_lr)

    # Resolve the scheduler class by name from torch.optim.lr_scheduler.
    scheduler_class = getattr(lr_schedulers, scheduler_name)
    scheduler = scheduler_class(optimizer, **scheduler_kwargs)

    # ReduceLROnPlateau.step() requires a metrics value, unlike every other
    # scheduler, and older torch versions lack get_last_lr() on it.
    needs_metric = isinstance(scheduler, lr_schedulers.ReduceLROnPlateau)

    lr_schedule = []
    for _ in range(total_steps):
        # Reading the optimizer's param group works for all scheduler types,
        # including ReduceLROnPlateau.
        lr_schedule.append(optimizer.param_groups[0]["lr"])
        if needs_metric:
            scheduler.step(0.0)  # constant dummy metric: plateau never improves
        else:
            scheduler.step()

    return lr_schedule
43
+
44
# Wrapper function for Gradio that handles scheduler selection.
def interactive_plot(x_min, x_max, scheduler_name, scheduler_params):
    """Build the LR-schedule plot for the Gradio UI.

    Parameters
    ----------
    x_min, x_max : number
        Step window to display; the schedule is simulated from step 0 up to
        ``x_max`` and the ``[x_min, x_max)`` slice is plotted.  (Previously
        ``x_min`` was accepted by the UI but silently ignored.)
    scheduler_name : str
        Scheduler class name forwarded to ``get_lr_schedule``.
    scheduler_params : dict
        Keyword arguments for the scheduler constructor (from the JSON input).

    Returns
    -------
    plotly.graph_objects.Figure
        Line plot of the learning-rate schedule.
    """
    start = max(int(x_min), 0)
    stop = int(x_max)
    steps = np.arange(start, stop)

    # Simulate from step 0 so every scheduler's state is correct, then slice
    # to the requested window.  initial_lr stays hard-coded at 0.1, matching
    # the original behavior.
    full_schedule = get_lr_schedule(
        scheduler_name=scheduler_name,
        initial_lr=0.1,
        total_steps=stop,
        scheduler_kwargs=scheduler_params,
    )
    lr_schedule = full_schedule[start:]

    title = f"Learning Rate Schedule with {scheduler_name}"
    return create_line_plot(steps, lr_schedule, title=title)
57
+
58
# Assemble the Gradio interface: inputs for the step window, scheduler
# choice, and scheduler kwargs; a button triggers the plot update.
with gr.Blocks() as demo:
    gr.Markdown("# Learning Rate Scheduler Plotter")

    window_start = gr.Number(label="X Min", value=0)
    window_end = gr.Number(label="X Max (Steps)", value=100)

    # Scheduler class picked from the names in `schedulers`.
    scheduler_choice = gr.Dropdown(choices=schedulers, label="Learning Rate Scheduler", value="ConstantLR")

    # Constructor kwargs entered as JSON (defaults suit ConstantLR).
    scheduler_kwargs_input = gr.JSON(label="Scheduler Parameters", value={"factor": 0.5, "total_iters": 10})

    lr_plot = gr.Plot(label="Learning Rate Graph")

    draw_button = gr.Button("Plot Graph")
    draw_button.click(
        interactive_plot,
        inputs=[window_start, window_end, scheduler_choice, scheduler_kwargs_input],
        outputs=lr_plot,
    )

# Launch the Gradio app.
demo.launch()