basicv8vc committed on
Commit
5764a6a
1 Parent(s): 3f8e00e

Create streamlit_app.py

Files changed (1)
  1. streamlit_app.py +649 -0
streamlit_app.py ADDED
@@ -0,0 +1,649 @@
+ import time
+ import re
+
+ import streamlit as st
+ import oneflow as flow
+
+ import numpy as np
+ import pandas as pd
+ import altair as alt
+ from altair import X, Y, Axis
+
+ ConstantLR_CODE = """oneflow.optim.lr_scheduler.ConstantLR(
+     optimizer: Optimizer,
+     factor: float = 1.0 / 3,
+     total_iters: int = 5,
+     last_step: int = -1,
+     verbose: bool = False
+ )"""
+
+ LinearLR_CODE = """oneflow.optim.lr_scheduler.LinearLR(
+     optimizer: Optimizer,
+     start_factor: float = 1.0 / 3,
+     end_factor: float = 1.0,
+     total_iters: int = 5,
+     last_step: int = -1,
+     verbose: bool = False,
+ )"""
+ ExponentialLR_CODE = """oneflow.optim.lr_scheduler.ExponentialLR(
+     optimizer: Optimizer,
+     gamma: float,
+     last_step: int = -1,
+     verbose: bool = False,
+ )"""
+
+ StepLR_CODE = """oneflow.optim.lr_scheduler.StepLR(
+     optimizer: Optimizer,
+     step_size: int,
+     gamma: float = 0.1,
+     last_step: int = -1,
+     verbose: bool = False,
+ )"""
+
+ MultiStepLR_CODE = """oneflow.optim.lr_scheduler.MultiStepLR(
+     optimizer: Optimizer,
+     milestones: list,
+     gamma: float = 0.1,
+     last_step: int = -1,
+     verbose: bool = False,
+ )"""
+
+ PolynomialLR_CODE = """oneflow.optim.lr_scheduler.PolynomialLR(
+     optimizer,
+     steps: int,
+     end_learning_rate: float = 0.0001,
+     power: float = 1.0,
+     cycle: bool = False,
+     last_step: int = -1,
+     verbose: bool = False,
+ )"""
+
+ CosineDecayLR_CODE = """oneflow.optim.lr_scheduler.CosineDecayLR(
+     optimizer: Optimizer,
+     decay_steps: int,
+     alpha: float = 0.0,
+     last_step: int = -1,
+     verbose: bool = False,
+ )"""
+
+ CosineAnnealingLR_CODE = """oneflow.optim.lr_scheduler.CosineAnnealingLR(
+     optimizer: Optimizer,
+     T_max: int,
+     eta_min: float = 0.0,
+     last_step: int = -1,
+     verbose: bool = False,
+ )"""
+
+ CosineAnnealingWarmRestarts_CODE = """oneflow.optim.lr_scheduler.CosineAnnealingWarmRestarts(
+     optimizer: Optimizer,
+     T_0: int,
+     T_mult: int = 1,
+     eta_min: float = 0.0,
+     decay_rate: float = 1.0,
+     restart_limit: int = 0,
+     last_step: int = -1,
+     verbose: bool = False,
+ )"""
+
+ SequentialLR_CODE = """oneflow.optim.lr_scheduler.SequentialLR(
+     optimizer: Optimizer,
+     schedulers: Sequence[LRScheduler],
+     milestones: Sequence[int],
+     interval_rescaling: Union[Sequence[bool], bool] = False,
+     last_step: int = -1,
+     verbose: bool = False,
+ )"""
+
+ WarmupLR_CODE = """oneflow.optim.lr_scheduler.WarmupLR(
+     scheduler_or_optimizer: Union[LRScheduler, Optimizer],
+     warmup_factor: float = 1.0 / 3,
+     warmup_iters: int = 5,
+     warmup_method: str = "linear",
+     warmup_prefix: bool = False,
+     last_step=-1,
+     verbose=False,
+ )"""
+
+ ReduceLROnPlateau_CODE = """oneflow.optim.lr_scheduler.ReduceLROnPlateau(
+     optimizer,
+     mode="min",
+     factor=0.1,
+     patience=10,
+     threshold=1e-4,
+     threshold_mode="rel",
+     cooldown=0,
+     min_lr=0,
+     eps=1e-8,
+     verbose=False,
+ )"""
+
+ IS_DISPLAY_CODE = False
+
+
+ def _display(display_steps, steps, lrs):
+     # Animate the learning-rate curve with Altair, revealing one point per step.
+     line = (  # Creating an empty chart in the beginning when the page loads
+         alt.Chart(pd.DataFrame({"last_step": [], "lr": []}))
+         .mark_line(point={"filled": True, "fill": "red"})
+         .encode(
+             x=X(
+                 "last_step",
+                 axis=Axis(title="step"),
+                 scale=alt.Scale(domain=[0, steps[-1] + 2]),
+             ),
+             y=Y(
+                 "lr",
+                 axis=Axis(title="lr"),
+                 scale=alt.Scale(domain=[min(lrs) * 0.8, max(lrs) * 1.2]),
+             ),
+             color=alt.value("#FFAA00"),
+         )
+         .properties(width=600, height=400)
+         .interactive()
+     )
+     bar_plot = st.altair_chart(line)
+
+     for i in range(display_steps):
+         df = pd.DataFrame({"last_step": steps[: i + 1], "lr": lrs[: i + 1]})
+         line = (
+             alt.Chart(df)
+             .mark_line(point={"filled": True, "fill": "red"})
+             .encode(
+                 x=X(
+                     "last_step",
+                     axis=Axis(title="step"),
+                     scale=alt.Scale(domain=[0, steps[-1] + 2]),
+                 ),
+                 y=Y(
+                     "lr",
+                     axis=Axis(title="lr"),
+                     scale=alt.Scale(domain=[min(lrs) * 0.8, max(lrs) * 1.2]),
+                 ),
+                 color=alt.value("#FFAA00"),
+             )
+             .properties(width=600, height=400)
+             .interactive()
+         )
+         bar_plot.altair_chart(line)
+         # Pretend we're doing some computation that takes time.
+         time.sleep(0.5)
+
+
+ # st.title("Learning Rate Scheduler Visualization")
+ st.header("Learning Rate Scheduler Visualization")
+
+
+ scheduler = st.selectbox(
+     "Please choose one scheduler to display",
+     (
+         "ConstantLR",
+         "LinearLR",
+         "ExponentialLR",
+         "StepLR",
+         "MultiStepLR",
+         "PolynomialLR",
+         "CosineDecayLR",
+         "CosineAnnealingLR",
+         "CosineAnnealingWarmRestarts",
+         # "LambdaLR",
+         # "SequentialLR",
+         # "WarmupLR",
+         # "ChainedScheduler",
+         # "ReduceLROnPlateau",
+     ),
+ )
+
+ if scheduler == "ConstantLR":
+     if IS_DISPLAY_CODE:
+         st.code(ConstantLR_CODE, language="python")
+     st.write("You can set argument values")
+     factor = st.slider("factor:", 0.0, 1.0, 0.3)
+     total_iters = st.slider("total_iters:", 0, 20, 5)
+     lr = st.slider("initial learning rate in Optimizer (e.g. SGD, Adam):", 0.0, 1.0, 0.1)
+
+     net = flow.nn.Linear(10, 2)
+     optimizer = flow.optim.SGD(net.parameters(), lr=lr)
+     scheduler = flow.optim.lr_scheduler.ConstantLR(
+         optimizer=optimizer, factor=factor, total_iters=total_iters
+     )
+     steps = []
+     lrs = []
+     display_steps = max(6, total_iters * 2)
+     for i in range(display_steps):
+         steps.append(i)
+         lrs.append(scheduler.get_last_lr()[0])
+         scheduler.step()
+
+     col1, col2, col3 = st.columns(3)
+     if col2.button("Display?"):
+         _display(display_steps, steps, lrs)
+
+
+ elif scheduler == "LinearLR":
+     if IS_DISPLAY_CODE:
+         st.code(LinearLR_CODE, language="python")
+     st.write("You can set argument values")
+     start_factor = st.slider("start_factor:", 0.0, 1.0, 0.3)
+     end_factor = st.slider("end_factor:", 0.0, 1.0, 1.0)
+     total_iters = st.slider("total_iters:", 0, 20, 5)
+     lr = st.slider("initial learning rate in Optimizer (e.g. SGD, Adam):", 0.0, 1.0, 0.1)
+
+     net = flow.nn.Linear(10, 2)
+     optimizer = flow.optim.SGD(net.parameters(), lr=lr)
+     scheduler = flow.optim.lr_scheduler.LinearLR(
+         optimizer=optimizer,
+         start_factor=start_factor,
+         end_factor=end_factor,
+         total_iters=total_iters,
+     )
+     steps = []
+     lrs = []
+     display_steps = max(6, total_iters * 2)
+     for i in range(display_steps):
+         steps.append(i)
+         lrs.append(scheduler.get_last_lr()[0])
+         scheduler.step()
+
+     col1, col2, col3 = st.columns(3)
+     if col2.button("Display?"):
+         _display(display_steps, steps, lrs)
+
+ elif scheduler == "ExponentialLR":
+     if IS_DISPLAY_CODE:
+         st.code(ExponentialLR_CODE, language="python")
+     st.write("You can set argument values")
+     gamma = st.slider("gamma:", 0.0, 1.0, 0.9)
+     lr = st.slider("initial learning rate in Optimizer (e.g. SGD, Adam):", 0.0, 1.0, 0.1)
+
+     net = flow.nn.Linear(10, 2)
+     optimizer = flow.optim.SGD(net.parameters(), lr=lr)
+     scheduler = flow.optim.lr_scheduler.ExponentialLR(
+         optimizer=optimizer,
+         gamma=gamma,
+     )
+     steps = []
+     lrs = []
+     display_steps = 20
+     for i in range(display_steps):
+         steps.append(i)
+         lrs.append(scheduler.get_last_lr()[0])
+         scheduler.step()
+
+     col1, col2, col3 = st.columns(3)
+     if col2.button("Display?"):
+         _display(display_steps, steps, lrs)
+
+ elif scheduler == "StepLR":
+     if IS_DISPLAY_CODE:
+         st.code(StepLR_CODE, language="python")
+     st.write("You can set argument values")
+     step_size = st.slider("step_size:", 0, 10, 2)
+     gamma = st.slider("gamma:", 0.0, 1.0, 0.9)
+     lr = st.slider("initial learning rate in Optimizer (e.g. SGD, Adam):", 0.0, 1.0, 0.1)
+
+     net = flow.nn.Linear(10, 2)
+     optimizer = flow.optim.SGD(net.parameters(), lr=lr)
+     scheduler = flow.optim.lr_scheduler.StepLR(
+         optimizer=optimizer,
+         step_size=step_size,
+         gamma=gamma,
+     )
+     steps = []
+     lrs = []
+     display_steps = 20
+     for i in range(display_steps):
+         steps.append(i)
+         lrs.append(scheduler.get_last_lr()[0])
+         scheduler.step()
+
+     col1, col2, col3 = st.columns(3)
+     if col2.button("Display?"):
+         _display(display_steps, steps, lrs)
+
+
+ elif scheduler == "MultiStepLR":
+     if IS_DISPLAY_CODE:
+         st.code(MultiStepLR_CODE, language="python")
+     st.write("You can set argument values")
+
+     collect_numbers = lambda x: [int(i) for i in re.split("[^0-9]", x) if i != ""]
+     milestones = st.text_input("Please enter milestones")
+     milestones = collect_numbers(milestones)
+     if milestones is None or len(milestones) == 0:
+         milestones = [5]
+     gamma = st.slider("gamma:", 0.0, 1.0, 0.9)
+     lr = st.slider("initial learning rate in Optimizer (e.g. SGD, Adam):", 0.0, 1.0, 0.1)
+
+     net = flow.nn.Linear(10, 2)
+     optimizer = flow.optim.SGD(net.parameters(), lr=lr)
+     scheduler = flow.optim.lr_scheduler.MultiStepLR(
+         optimizer=optimizer,
+         milestones=milestones,
+         gamma=gamma,
+     )
+     steps = []
+     lrs = []
+     display_steps = milestones[-1] + 5
+     for i in range(display_steps):
+         steps.append(i)
+         lrs.append(scheduler.get_last_lr()[0])
+         scheduler.step()
+
+     col1, col2, col3 = st.columns(3)
+     if col2.button("Display?"):
+         _display(display_steps, steps, lrs)
+
+ elif scheduler == "PolynomialLR":
+     if IS_DISPLAY_CODE:
+         st.code(PolynomialLR_CODE, language="python")
+     st.write("You can set argument values")
+     steps = st.slider("steps:", 1, 10, 5)
+     end_learning_rate = st.slider("end_learning_rate", 0.0, 1.0, 0.0001)
+     power = st.slider("power", 0.0, 10.0, 1.0)
+     cycle = st.checkbox(
+         "cycle",
+     )
+     lr = st.slider("initial learning rate in Optimizer (e.g. SGD, Adam):", 0.0, 1.0, 0.1)
+
+     net = flow.nn.Linear(10, 2)
+     optimizer = flow.optim.SGD(net.parameters(), lr=lr)
+     scheduler = flow.optim.lr_scheduler.PolynomialLR(
+         optimizer=optimizer,
+         steps=steps,
+         end_learning_rate=end_learning_rate,
+         power=power,
+         cycle=cycle,
+     )
+     x_steps = []
+     lrs = []
+     display_steps = max(steps + 5, 10)
+     for i in range(display_steps):
+         x_steps.append(i)
+         lrs.append(scheduler.get_last_lr()[0])
+         scheduler.step()
+
+     col1, col2, col3 = st.columns(3)
+     if col2.button("Display?"):
+         _display(display_steps, x_steps, lrs)
+
+ elif scheduler == "CosineDecayLR":
+     if IS_DISPLAY_CODE:
+         st.code(CosineDecayLR_CODE, language="python")
+     st.write("You can set argument values")
+     decay_steps = st.slider("decay_steps:", 0, 10, 5)
+     alpha = st.slider("alpha", 0.0, 1.0, 0.0)
+     lr = st.slider("initial learning rate in Optimizer (e.g. SGD, Adam):", 0.0, 1.0, 0.1)
+
+     net = flow.nn.Linear(10, 2)
+     optimizer = flow.optim.SGD(net.parameters(), lr=lr)
+     scheduler = flow.optim.lr_scheduler.CosineDecayLR(
+         optimizer=optimizer,
+         decay_steps=decay_steps,
+         alpha=alpha,
+     )
+     x_steps = []
+     lrs = []
+     display_steps = max(decay_steps + 5, 10)
+     for i in range(display_steps):
+         x_steps.append(i)
+         lrs.append(scheduler.get_last_lr()[0])
+         scheduler.step()
+
+     col1, col2, col3 = st.columns(3)
+     if col2.button("Display?"):
+         _display(display_steps, x_steps, lrs)
+
+ elif scheduler == "CosineAnnealingLR":
+     if IS_DISPLAY_CODE:
+         st.code(CosineAnnealingLR_CODE, language="python")
+     st.write("You can set argument values")
+     T_max = st.slider("T_max", 1, 20, 20)
+     eta_min = st.slider("eta_min", 0.0, 1.0, 0.0)
+     lr = st.slider("initial learning rate in Optimizer (e.g. SGD, Adam):", 0.0, 1.0, 0.1)
+
+     net = flow.nn.Linear(10, 2)
+     optimizer = flow.optim.SGD(net.parameters(), lr=lr)
+     scheduler = flow.optim.lr_scheduler.CosineAnnealingLR(
+         optimizer=optimizer,
+         T_max=T_max,
+         eta_min=eta_min,
+     )
+     x_steps = []
+     lrs = []
+     display_steps = max(T_max + 5, 20)
+     for i in range(display_steps):
+         x_steps.append(i)
+         lrs.append(scheduler.get_last_lr()[0])
+         scheduler.step()
+
+     col1, col2, col3 = st.columns(3)
+     if col2.button("Display?"):
+         _display(display_steps, x_steps, lrs)
+
+ elif scheduler == "CosineAnnealingWarmRestarts":
+     if IS_DISPLAY_CODE:
+         st.code(CosineAnnealingWarmRestarts_CODE, language="python")
+     st.write("You can set argument values")
+     T_0 = st.slider("T_0", 1, 20, 5)
+     T_mult = st.slider("T_mult", 1, 5, 1)
+     eta_min = st.slider("eta_min", 0.0, 1.0, 0.0)
+     decay_rate = st.slider("decay_rate", 0.0, 1.0, 1.0)
+     restart_limit = st.slider("restart_limit", 0, 5, 0)
+     lr = st.slider("initial learning rate in Optimizer (e.g. SGD, Adam):", 0.0, 1.0, 0.1)
+
+     net = flow.nn.Linear(10, 2)
+     optimizer = flow.optim.SGD(net.parameters(), lr=lr)
+     scheduler = flow.optim.lr_scheduler.CosineAnnealingWarmRestarts(
+         optimizer=optimizer,
+         T_0=T_0,
+         T_mult=T_mult,
+         eta_min=eta_min,
+         decay_rate=decay_rate,
+         restart_limit=restart_limit,
+     )
+     x_steps = []
+     lrs = []
+     display_steps = max(T_0 + 5, 20)
+     for i in range(display_steps):
+         x_steps.append(i)
+         lrs.append(scheduler.get_last_lr()[0])
+         scheduler.step()
+
+     col1, col2, col3 = st.columns(3)
+     if col2.button("Display?"):
+         _display(display_steps, x_steps, lrs)
+
+ # elif scheduler == "LambdaLR":
+ #     code = """oneflow.optim.lr_scheduler.LambdaLR(optimizer, lr_lambda, last_step=-1, verbose=False)"""
+ #     st.code(code, language="python")
+
+ elif scheduler == "SequentialLR":
+     if IS_DISPLAY_CODE:
+         st.code(SequentialLR_CODE, language="python")
+     st.write("You can set argument values")
+     schedulers = st.multiselect(
+         "You can choose multiple schedulers",
+         [
+             "ConstantLR",
+             "LinearLR",
+             "ExponentialLR",
+             "StepLR",
+             "MultiStepLR",
+             "PolynomialLR",
+             "CosineDecayLR",
+             "CosineAnnealingLR",
+             "CosineAnnealingWarmRestarts",
+             "ConstantLR",
+             "LinearLR",
+             "ExponentialLR",
+             "StepLR",
+             "MultiStepLR",
+             "PolynomialLR",
+             "CosineDecayLR",
+             "CosineAnnealingLR",
+             "CosineAnnealingWarmRestarts",
+         ],
+     )
+     collect_numbers = lambda x: [int(i) for i in re.split("[^0-9]", x) if i != ""]
+     milestones = st.text_input("Please enter milestones")
+     milestones = collect_numbers(milestones)
+     interval_rescaling = st.checkbox("interval_rescaling")
+     lr = st.slider("initial learning rate in Optimizer (e.g. SGD, Adam):", 0.0, 1.0, 0.1)
+
+     net = flow.nn.Linear(10, 2)
+     optimizer = flow.optim.SGD(net.parameters(), lr=lr)
+     scheduler = flow.optim.lr_scheduler.SequentialLR(
+         optimizer=optimizer,
+         schedulers=schedulers,
+         milestones=milestones,
+         interval_rescaling=interval_rescaling,
+     )
+     x_steps = []
+     lrs = []
+     display_steps = max(milestones[-1] + 5, 20)
+     for i in range(display_steps):
+         x_steps.append(i)
+         lrs.append(scheduler.get_last_lr()[0])
+         scheduler.step()
+
+     col1, col2, col3 = st.columns(3)
+     if col2.button("Display?"):
+         _display(display_steps, x_steps, lrs)
+
+ elif scheduler == "WarmupLR":
+     if IS_DISPLAY_CODE:
+         st.code(WarmupLR_CODE, language="python")
+     scheduler_or_optimizer = st.selectbox(
+         "Choose one scheduler for scheduler_or_optimizer",
+         [
+             "ConstantLR",
+             "LinearLR",
+             "ExponentialLR",
+             "StepLR",
+             "MultiStepLR",
+             "PolynomialLR",
+             "CosineDecayLR",
+             "CosineAnnealingLR",
+             "CosineAnnealingWarmRestarts",
+         ],
+     )
+     warmup_factor = st.slider("warmup_factor:", 0.0, 1.0, 0.3)
+     warmup_iters = st.slider("warmup_iters:", 1, 10, 5)
+     warmup_method = st.selectbox("warmup_method", ["linear", "constant"])
+     warmup_prefix = st.checkbox("warmup_prefix")
+     lr = st.slider("initial learning rate in Optimizer (e.g. SGD, Adam):", 0.0, 1.0, 0.1)
+
+     net = flow.nn.Linear(10, 2)
+     optimizer = flow.optim.SGD(net.parameters(), lr=lr)
+     scheduler = flow.optim.lr_scheduler.WarmupLR(
+         optimizer=optimizer,
+         scheduler_or_optimizer=scheduler_or_optimizer,
+         warmup_factor=warmup_factor,
+         warmup_iters=warmup_iters,
+         warmup_method=warmup_method,
+         warmup_prefix=warmup_prefix,
+     )
+     x_steps = []
+     lrs = []
+     display_steps = max(warmup_iters + 5, 20)
+     for i in range(display_steps):
+         x_steps.append(i)
+         lrs.append(scheduler.get_last_lr()[0])
+         scheduler.step()
+
+     col1, col2, col3 = st.columns(3)
+     if col2.button("Display?"):
+         _display(display_steps, x_steps, lrs)
+
+
+ elif scheduler == "ChainedScheduler":
+     if IS_DISPLAY_CODE:
+         code = """oneflow.optim.lr_scheduler.ChainedScheduler(schedulers)"""
+         st.code(code, language="python")
+     st.write("You can set argument values")
+     schedulers = st.multiselect(
+         "You can choose multiple schedulers",
+         [
+             "ConstantLR",
+             "LinearLR",
+             "ExponentialLR",
+             "StepLR",
+             "MultiStepLR",
+             "PolynomialLR",
+             "CosineDecayLR",
+             "CosineAnnealingLR",
+             "CosineAnnealingWarmRestarts",
+             "ConstantLR",
+             "LinearLR",
+             "ExponentialLR",
+             "StepLR",
+             "MultiStepLR",
+             "PolynomialLR",
+             "CosineDecayLR",
+             "CosineAnnealingLR",
+             "CosineAnnealingWarmRestarts",
+         ],
+     )
+     lr = st.slider("initial learning rate in Optimizer (e.g. SGD, Adam):", 0.0, 1.0, 0.1)
+
+     net = flow.nn.Linear(10, 2)
+     optimizer = flow.optim.SGD(net.parameters(), lr=lr)
+     scheduler = flow.optim.lr_scheduler.ChainedScheduler(
+         optimizer=optimizer,
+         schedulers=schedulers,
+     )
+     x_steps = []
+     lrs = []
+     display_steps = 20
+     for i in range(display_steps):
+         x_steps.append(i)
+         lrs.append(scheduler.get_last_lr()[0])
+         scheduler.step()
+
+     col1, col2, col3 = st.columns(3)
+     if col2.button("Display?"):
+         _display(display_steps, x_steps, lrs)
+
+ # elif scheduler == "ReduceLROnPlateau":
+ #     st.code(ReduceLROnPlateau_CODE, language="python")
+ #     st.write("You can set argument values")
+ #     mode = st.selectbox(
+ #         "mode",
+ #         [
+ #             "min",
+ #             "max",
+ #         ],
+ #     )
+ #     factor = st.slider("factor", 1e-5, 1.0 - 1e-5, 0.1)
+ #     patience = st.slider("patience", 1, 20, 10)
+ #     threshold = st.slider("threshold", 1e-4, 9e-4, 1e-4)
+ #     threshold_mode = st.selectbox("threshold_mode", ["rel", "abs"])
+ #     cooldown = st.slider("cooldown", 0, 10, 0)
+ #     min_lr = st.slider("min_lr", 0.0, 1.0, 0.0)
+ #     eps = st.slider("eps", 1e-8, 9e-8, 1e-8)
+ #     lr = st.slider("initial learning rate in Optimizer(e.g. SGD, Adam):", 0.0, 1.0, 0.1)
+
+ #     net = flow.nn.Linear(10, 2)
+ #     optimizer = flow.optim.SGD(net.parameters(), lr=lr)
+ #     scheduler = flow.optim.lr_scheduler.ReduceLROnPlateau(
+ #         optimizer=optimizer,
+ #         mode=mode,
+ #         factor=factor,
+ #         patience=patience,
+ #         threshold=threshold,
+ #         threshold_mode=threshold_mode,
+ #         cooldown=cooldown,
+ #         min_lr=min_lr,
+ #         eps=eps,
+ #     )
+ #     x_steps = []
+ #     lrs = []
+ #     display_steps = 25
+ #     for i in range(display_steps):
+ #         x_steps.append(i)
+ #         lrs.append(scheduler.get_last_lr()[0])
+ #         scheduler.step()
+
+ #     col1, col2, col3 = st.columns(3)
+ #     if col2.button("Display?"):
+ #         _display(display_steps, x_steps, lrs)
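
Every branch above follows the same sampling pattern: build a throwaway module and optimizer, attach a scheduler, and record get_last_lr()[0] before each scheduler.step(). The pattern also works outside Streamlit. A minimal sketch, assuming OneFlow is installed locally; the CosineAnnealingLR choice and its parameter values are illustrative and not part of the commit:

import oneflow as flow

# Throwaway module whose parameters give the optimizer something to track.
net = flow.nn.Linear(10, 2)
optimizer = flow.optim.SGD(net.parameters(), lr=0.1)

# Any of the schedulers visualized by the app can be attached here instead.
scheduler = flow.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=20)

lrs = []
for step in range(25):
    lrs.append(scheduler.get_last_lr()[0])  # learning rate in effect at this step
    scheduler.step()                        # advance the schedule by one step

print(lrs)

The app itself is launched in the usual Streamlit way, e.g. streamlit run streamlit_app.py.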