1 parent 1195427 commit 06285dc
questions/152_exponentialLR/solution.py
@@ -27,11 +27,4 @@ def get_lr(self, epoch):
         current_lr = self.initial_lr * (self.gamma ** epoch)
 
         # Round the learning rate to 4 decimal places
-        return round(current_lr, 4)
-
-
-
-
-
-scheduler = ExponentialLRScheduler(initial_lr=0.005, gamma=0.99)
-print(f"{scheduler.get_lr(epoch=0):.4f}\n{scheduler.get_lr(epoch=10):.4f}\n{scheduler.get_lr(epoch=20):.4f}")
+        return round(current_lr, 4)
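
For reference, a minimal sketch of what solution.py likely looks like after this change. Only get_lr appears in the hunk above; the class name and constructor parameters are inferred from the driver code removed by this commit, and the docstring and __main__ guard are assumptions added for illustration.

class ExponentialLRScheduler:
    """Exponential learning-rate decay: lr = initial_lr * gamma ** epoch."""

    def __init__(self, initial_lr, gamma):
        # Assumed constructor: signature inferred from the removed driver code
        self.initial_lr = initial_lr
        self.gamma = gamma

    def get_lr(self, epoch):
        # Decay the initial learning rate exponentially with the epoch index
        current_lr = self.initial_lr * (self.gamma ** epoch)

        # Round the learning rate to 4 decimal places
        return round(current_lr, 4)


if __name__ == "__main__":
    # Usage mirroring the removed driver code; expected output: 0.0050, 0.0045, 0.0041
    scheduler = ExponentialLRScheduler(initial_lr=0.005, gamma=0.99)
    for epoch in (0, 10, 20):
        print(f"{scheduler.get_lr(epoch=epoch):.4f}")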