Nanobit committed on
Commit
fe1f4c4
1 Parent(s): dae14e5

Lint schedulers

Browse files
Files changed (1) hide show
  1. src/axolotl/utils/schedulers.py +9 -1
src/axolotl/utils/schedulers.py CHANGED
@@ -1,7 +1,13 @@
 
 
1
  from torch.optim.lr_scheduler import LRScheduler
2
 
3
 
4
  class InterpolatingLogScheduler(LRScheduler):
 
 
 
 
5
  def __init__(self, optimizer, num_steps, min_lr, max_lr, last_epoch=-1):
6
  """A scheduler that interpolates learning rates in a logarithmic fashion
7
 
@@ -19,7 +25,9 @@ class InterpolatingLogScheduler(LRScheduler):
19
  self.num_steps = num_steps
20
  self.min_lr = min_lr
21
  self.max_lr = max_lr
22
- self.q = (max_lr / min_lr) ** (1 / (num_steps - 1))
 
 
23
  super().__init__(optimizer, last_epoch)
24
 
25
  def get_lr(self):
 
1
+ """Module for custom LRScheduler class"""
2
+
3
  from torch.optim.lr_scheduler import LRScheduler
4
 
5
 
6
  class InterpolatingLogScheduler(LRScheduler):
7
+ """
8
+ A scheduler that interpolates learning rates in a logarithmic fashion
9
+ """
10
+
11
  def __init__(self, optimizer, num_steps, min_lr, max_lr, last_epoch=-1):
12
  """A scheduler that interpolates learning rates in a logarithmic fashion
13
 
 
25
  self.num_steps = num_steps
26
  self.min_lr = min_lr
27
  self.max_lr = max_lr
28
+ self.q = (max_lr / min_lr) ** ( # pylint: disable=invalid-name
29
+ 1 / (num_steps - 1)
30
+ )
31
  super().__init__(optimizer, last_epoch)
32
 
33
  def get_lr(self):