Module monk.tf_keras_1.finetune.level_12_losses_main
Source code
from tf_keras_1.finetune.imports import *
from system.imports import *
from tf_keras_1.finetune.level_11_optimizers_main import prototype_optimizers
class prototype_losses(prototype_optimizers):
    '''
    Main class for all parameters in expert mode
    Args:
        verbose (int): Set verbosity levels
                        0 - Print Nothing
                        1 - Print desired details
    '''
    def __init__(self, verbose=1):
        super().__init__(verbose=verbose);
    
    ###############################################################################################################################################
    def loss_l1(self, weight=None, batch_axis=0):
        '''
        Select L1 Loss
        Args:
            weight (float): global scalar to weight the loss
            batch_axis (int): Axis representing number of elements in the batch - N
        Returns:
            None
        '''
        self.system_dict = l1(self.system_dict, weight=weight, batch_axis=batch_axis);
        self.custom_print("Loss");
        self.custom_print("    Name:          {}".format(self.system_dict["hyper-parameters"]["loss"]["name"]));
        self.custom_print("    Params:        {}".format(self.system_dict["hyper-parameters"]["loss"]["params"]));
        self.custom_print("");
    ###############################################################################################################################################
    
    ###############################################################################################################################################
    def loss_l2(self, weight=1.0, batch_axis=0):
        '''
        Select L2 Loss
        Args:
            weight (float): global scalar to weight the loss
            batch_axis (int): Axis representing number of elements in the batch - N
        Returns:
            None
        '''
        self.system_dict = l2(self.system_dict, weight=weight, batch_axis=batch_axis);
        self.custom_print("Loss");
        self.custom_print("    Name:          {}".format(self.system_dict["hyper-parameters"]["loss"]["name"]));
        self.custom_print("    Params:        {}".format(self.system_dict["hyper-parameters"]["loss"]["params"]));
        self.custom_print("");
    ###############################################################################################################################################
    ###############################################################################################################################################
    def loss_crossentropy(self, weight=None, batch_axis=0, axis_to_sum_over=-1, 
                                    label_as_categories=True, label_smoothing=False):
        '''
        Select softmax crossentropy Loss - softmax is applied automatically before the loss
        Args:
            weight (float): global scalar to weight the loss
            batch_axis (int): Axis representing number of elements in the batch - N
            axis_to_sum_over (int): Axis over which the loss is summed (fixed at -1)
            label_as_categories (bool): Fixed as True
            label_smoothing (bool): If True, label smoothing is applied.
        Returns:
            None
        '''
        self.system_dict = crossentropy(self.system_dict, weight=weight, batch_axis=batch_axis,
                                                axis_to_sum_over=axis_to_sum_over, label_as_categories=label_as_categories, 
                                                label_smoothing=label_smoothing);
        self.custom_print("Loss");
        self.custom_print("    Name:          {}".format(self.system_dict["hyper-parameters"]["loss"]["name"]));
        self.custom_print("    Params:        {}".format(self.system_dict["hyper-parameters"]["loss"]["params"]));
        self.custom_print("");
    ###############################################################################################################################################
    ###############################################################################################################################################
    def loss_binary_crossentropy(self, weight=None, batch_axis=0):
        '''
        Select binary crossentropy Loss - sigmoid must be applied manually before the loss
        Args:
            weight (float): global scalar to weight the loss
            batch_axis (int): Axis representing number of elements in the batch - N
        Returns:
            None
        '''
        self.system_dict = binary_crossentropy(self.system_dict, weight=weight, batch_axis=batch_axis);
        self.custom_print("Loss");
        self.custom_print("    Name:          {}".format(self.system_dict["hyper-parameters"]["loss"]["name"]));
        self.custom_print("    Params:        {}".format(self.system_dict["hyper-parameters"]["loss"]["params"]));
        self.custom_print("");
    ###############################################################################################################################################
    ###############################################################################################################################################
    def loss_kldiv(self, log_pre_applied=False, weight=None, batch_axis=0, axis_to_sum_over=-1):
        '''
        Select KL-divergence (KLDiv) Loss
        Args:
            weight (float): global scalar to weight the loss
            batch_axis (int): Axis representing number of elements in the batch - N
            axis_to_sum_over (int): Axis over which the loss is summed (fixed at -1)
            log_pre_applied (bool): If False, the logarithm is applied automatically to the target variables
        Returns:
            None
        '''
        self.system_dict = kldiv(self.system_dict, weight=weight, batch_axis=batch_axis,
                                axis_to_sum_over=axis_to_sum_over, log_pre_applied=log_pre_applied);
        self.custom_print("Loss");
        self.custom_print("    Name:          {}".format(self.system_dict["hyper-parameters"]["loss"]["name"]));
        self.custom_print("    Params:        {}".format(self.system_dict["hyper-parameters"]["loss"]["params"]));
        self.custom_print("");
    ###############################################################################################################################################
    ###############################################################################################################################################
    def loss_hinge(self, weight=None, batch_axis=0, margin=1):
        '''
        Select hinge Loss
        Args:
            weight (float): global scalar to weight the loss
            batch_axis (int): Axis representing number of elements in the batch - N
            margin (float): Margin value.
        Returns:
            None
        '''
        self.system_dict = hinge(self.system_dict, margin=margin,
                                weight=weight, batch_axis=batch_axis);
        self.custom_print("Loss");
        self.custom_print("    Name:          {}".format(self.system_dict["hyper-parameters"]["loss"]["name"]));
        self.custom_print("    Params:        {}".format(self.system_dict["hyper-parameters"]["loss"]["params"]));
        self.custom_print("");
    ###############################################################################################################################################
    ###############################################################################################################################################
    def loss_squared_hinge(self, weight=None, batch_axis=0, margin=1):
        '''
        Select squared hinge Loss
        Args:
            weight (float): global scalar to weight the loss
            batch_axis (int): Axis representing number of elements in the batch - N
            margin (float): Margin value.
        Returns:
            None
        '''
        self.system_dict = squared_hinge(self.system_dict, margin=margin,
                                weight=weight, batch_axis=batch_axis);
        self.custom_print("Loss");
        self.custom_print("    Name:          {}".format(self.system_dict["hyper-parameters"]["loss"]["name"]));
        self.custom_print("    Params:        {}".format(self.system_dict["hyper-parameters"]["loss"]["params"]));
        self.custom_print("");
    ###############################################################################################################################################
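These selectors are usually not called on prototype_losses directly; in Monk's expert-mode workflow the user-facing prototype class inherits this whole chain, so the loss methods are invoked on it. A minimal hedged sketch is shown below - the import path and the Prototype() call are assumptions based on Monk's documented workflow, not verified against this exact version:

    # Hedged usage sketch; the import path and Prototype() are assumptions.
    from monk.keras_prototype import prototype

    gtf = prototype(verbose=1)
    gtf.Prototype("project-name", "experiment-1")

    # Any selector from this class records the chosen loss in system_dict
    # and prints its name and params via custom_print.
    gtf.loss_crossentropy(label_smoothing=True)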
Classes

- class prototype_losses (verbose=1)

  Main class for all parameters in expert mode.

  Args:
  - verbose (int): Set verbosity levels
    0 - Print Nothing
    1 - Print desired details
Ancestors

- tf_keras_1.finetune.level_11_optimizers_main.prototype_optimizers
- tf_keras_1.finetune.level_10_schedulers_main.prototype_schedulers
- tf_keras_1.finetune.level_9_transforms_main.prototype_transforms
- tf_keras_1.finetune.level_8_layers_main.prototype_layers
- tf_keras_1.finetune.level_7_aux_main.prototype_aux
- tf_keras_1.finetune.level_6_params_main.prototype_params
- tf_keras_1.finetune.level_5_state_base.finetune_state
- tf_keras_1.finetune.level_4_evaluation_base.finetune_evaluation
- tf_keras_1.finetune.level_3_training_base.finetune_training
- tf_keras_1.finetune.level_2_model_base.finetune_model
- tf_keras_1.finetune.level_1_dataset_base.finetune_dataset
- system.base_class.system
Methods

- def loss_binary_crossentropy(self, weight=None, batch_axis=0)

  Select binary crossentropy Loss - sigmoid must be applied manually before the loss.

  Args:
  - weight (float): global scalar to weight the loss
  - batch_axis (int): Axis representing number of elements in the batch - N

  Returns: None
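A hedged one-line continuation of the usage sketch above (gtf is the assumed prototype instance):

    # Sigmoid is NOT applied automatically - the model's final layer must apply it.
    gtf.loss_binary_crossentropy(weight=None, batch_axis=0)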
- def loss_crossentropy(self, weight=None, batch_axis=0, axis_to_sum_over=-1, label_as_categories=True, label_smoothing=False)

  Select softmax crossentropy Loss - softmax is applied automatically before the loss.

  Args:
  - weight (float): global scalar to weight the loss
  - batch_axis (int): Axis representing number of elements in the batch - N
  - axis_to_sum_over (int): Axis over which the loss is summed (fixed at -1)
  - label_as_categories (bool): Fixed as True
  - label_smoothing (bool): If True, label smoothing is applied.

  Returns: None
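Continuing the hedged sketch above, with smoothing enabled on the one-hot targets:

    # Softmax is applied automatically before the loss.
    gtf.loss_crossentropy(label_smoothing=True)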
- def loss_hinge(self, weight=None, batch_axis=0, margin=1)

  Select hinge Loss.

  Args:
  - weight (float): global scalar to weight the loss
  - batch_axis (int): Axis representing number of elements in the batch - N
  - margin (float): Margin value.

  Returns: None
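Continuing the hedged sketch above; margin is the only hinge-specific parameter:

    gtf.loss_hinge(margin=1)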
- def loss_kldiv(self, log_pre_applied=False, weight=None, batch_axis=0, axis_to_sum_over=-1)

  Select KL-divergence (KLDiv) Loss.

  Args:
  - weight (float): global scalar to weight the loss
  - batch_axis (int): Axis representing number of elements in the batch - N
  - axis_to_sum_over (int): Axis over which the loss is summed (fixed at -1)
  - log_pre_applied (bool): If False, the logarithm is applied automatically to the target variables

  Returns: None
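Continuing the hedged sketch above; the comment reflects one reading of log_pre_applied and is an assumption, not verified behavior:

    # With log_pre_applied=False the log is taken automatically, so targets
    # are assumed to be passed as plain (non-log) probabilities.
    gtf.loss_kldiv(log_pre_applied=False)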
- def loss_l1(self, weight=None, batch_axis=0)

  Select L1 Loss.

  Args:
  - weight (float): global scalar to weight the loss
  - batch_axis (int): Axis representing number of elements in the batch - N

  Returns: None
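Continuing the hedged sketch above; weight is a global multiplier on the loss (the 0.5 value is arbitrary):

    gtf.loss_l1(weight=0.5)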
- def loss_l2(self, weight=1.0, batch_axis=0)

  Select L2 Loss.

  Args:
  - weight (float): global scalar to weight the loss
  - batch_axis (int): Axis representing number of elements in the batch - N

  Returns: None
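Continuing the hedged sketch above; note from the signatures that this selector defaults weight to 1.0, while the others default it to None:

    gtf.loss_l2(weight=1.0)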
- def loss_squared_hinge(self, weight=None, batch_axis=0, margin=1)

  Select squared hinge Loss.

  Args:
  - weight (float): global scalar to weight the loss
  - batch_axis (int): Axis representing number of elements in the batch - N
  - margin (float): Margin value.

  Returns: None
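Continuing the hedged sketch above; the squared variant penalizes margin violations quadratically rather than linearly:

    gtf.loss_squared_hinge(margin=1)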