Module monk.gluon.finetune.level_10_schedulers_main

Source code
from gluon.finetune.imports import *
from system.imports import *

from gluon.finetune.level_9_transforms_main import prototype_transforms


class prototype_schedulers(prototype_transforms):
    '''
    Main class for all learning rate schedulers in expert mode

    Args:
        verbose (int): Set verbosity level
                        0 - Print nothing
                        1 - Print desired details
    '''

    def __init__(self, verbose=1):
        super().__init__(verbose=verbose)

    ###############################################################################################################################################
    def lr_fixed(self):
        '''
        Keep the learning rate fixed throughout training

        Args:
            None

        Returns:
            None
        '''
        self.system_dict = scheduler_fixed(self.system_dict)

        self.custom_print("Learning rate scheduler")
        self.custom_print("    Name:   {}".format(self.system_dict["hyper-parameters"]["learning_rate_scheduler"]["name"]))
        self.custom_print("    Params: {}".format(self.system_dict["hyper-parameters"]["learning_rate_scheduler"]["params"]))
        self.custom_print("")
    ###############################################################################################################################################
        

    ###############################################################################################################################################
    def lr_step_decrease(self, step_size, gamma=0.1, last_epoch=-1):
        '''
        Set learning rate to decrease in regular steps

        Args:
            step_size (int): Interval, in epochs, between learning rate reductions
            gamma (float): Multiplicative factor applied to the learning rate at every step
            last_epoch (int): Epoch after which the learning rate is no longer decreased

        Returns:
            None
        '''
        self.system_dict = scheduler_step(self.system_dict, step_size, gamma=gamma, last_epoch=last_epoch)

        self.custom_print("Learning rate scheduler")
        self.custom_print("    Name:   {}".format(self.system_dict["hyper-parameters"]["learning_rate_scheduler"]["name"]))
        self.custom_print("    Params: {}".format(self.system_dict["hyper-parameters"]["learning_rate_scheduler"]["params"]))
        self.custom_print("")
    ###############################################################################################################################################
        

    ###############################################################################################################################################
    def lr_multistep_decrease(self, milestones, gamma=0.1, last_epoch=-1):
        '''
        Set learning rate to decrease at irregular intervals

        Args:
            milestones (list): List of epochs at which the learning rate is to be decreased
            gamma (float): Multiplicative factor applied to the learning rate at every milestone
            last_epoch (int): Unused; retained for interface compatibility

        Returns:
            None
        '''
        self.system_dict = scheduler_multistep(self.system_dict, milestones, gamma=gamma, last_epoch=last_epoch)

        self.custom_print("Learning rate scheduler")
        self.custom_print("    Name:   {}".format(self.system_dict["hyper-parameters"]["learning_rate_scheduler"]["name"]))
        self.custom_print("    Params: {}".format(self.system_dict["hyper-parameters"]["learning_rate_scheduler"]["params"]))
        self.custom_print("")
    ###############################################################################################################################################
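
In practice these scheduler methods are reached through the expert-mode prototype object rather than by instantiating this class directly. A minimal sketch of that flow follows; the import path, helper calls, and project names are assumptions based on the usual Monk workflow, not something this page confirms:

# Hypothetical expert-mode setup; paths and names are placeholders
from monk.gluon_prototype import prototype

gtf = prototype(verbose=1)
gtf.Prototype("sample-project", "sample-experiment")

# ... dataset, model and training-parameter setup elided ...

# Select exactly one learning rate scheduler before training
gtf.lr_multistep_decrease([10, 20], gamma=0.1)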

Classes

class prototype_schedulers (verbose=1)

Main class for all learning rate schedulers in expert mode

Args

verbose : int
Set verbosity level: 0 - print nothing; 1 - print desired details
Ancestors

  • gluon.finetune.level_9_transforms_main.prototype_transforms
  • gluon.finetune.level_8_layers_main.prototype_layers
  • gluon.finetune.level_7_aux_main.prototype_aux
  • gluon.finetune.level_6_params_main.prototype_params
  • gluon.finetune.level_5_state_base.finetune_state
  • gluon.finetune.level_4_evaluation_base.finetune_evaluation
  • gluon.finetune.level_3_training_base.finetune_training
  • gluon.finetune.level_2_model_base.finetune_model
  • gluon.finetune.level_1_dataset_base.finetune_dataset
  • system.base_class.system

Methods

def lr_fixed(self)

Keep the learning rate fixed throughout training

Args

None

Returns

None
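
Continuing from the setup sketch near the top of this page (gtf is the hypothetical prototype object from that sketch), a single call selects the fixed scheduler:

# Disable decay entirely; the optimizer's base learning rate is used for the whole run
gtf.lr_fixed()
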
def lr_multistep_decrease(self, milestones, gamma=0.1, last_epoch=-1)

Set learning rate to decrease at irregular intervals

Args

milestones : list
List of epochs at which the learning rate is to be decreased
gamma : float
Multiplicative factor applied to the learning rate at every milestone
last_epoch : int
Unused; retained for interface compatibility

Returns

None
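
The effect of milestones and gamma can be previewed without training. After a call such as gtf.lr_multistep_decrease([10, 20], gamma=0.1), the standard multistep rule, assumed here for illustration, multiplies the rate by gamma each time a milestone epoch is reached:

# Preview of a multistep schedule (standard semantics, assumed for this backend)
base_lr, gamma, milestones = 0.01, 0.1, [10, 20]

def lr_at(epoch):
    # Multiply by gamma once for every milestone already reached
    return base_lr * gamma ** sum(epoch >= m for m in milestones)

for epoch in (0, 9, 10, 19, 20, 30):
    print(epoch, lr_at(epoch))  # 0.01 up to epoch 9, 0.001 from 10, 0.0001 from 20
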
def lr_step_decrease(self, step_size, gamma=0.1, last_epoch=-1)

Set learning rate to decrease in regular steps

Args

step_size : int
Interval, in epochs, between learning rate reductions
gamma : float
Multiplicative factor applied to the learning rate at every step
last_epoch : int
Epoch after which the learning rate is no longer decreased

Returns

None
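
For the regular schedule, the standard step-decay rule (again an assumption, stated for illustration) gives lr(epoch) = base_lr * gamma ** (epoch // step_size), so a call such as gtf.lr_step_decrease(step_size=5, gamma=0.1) would behave as below:

# Preview of a step schedule (standard step-decay semantics, assumed)
base_lr, gamma, step_size = 0.01, 0.1, 5

def lr_at(epoch):
    # The rate is multiplied by gamma every step_size epochs
    return base_lr * gamma ** (epoch // step_size)

for epoch in (0, 4, 5, 9, 10):
    print(epoch, lr_at(epoch))  # 0.01, 0.01, 0.001, 0.001, 0.0001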