neodroidvision.utilities.torch\_utilities.layers.torch\_layers.MinMaxNorm
=========================================================================

.. currentmodule:: neodroidvision.utilities.torch_utilities.layers.torch_layers

.. autoclass:: MinMaxNorm
   :members:
   :show-inheritance:
   :inherited-members:

   .. automethod:: __init__

   .. rubric:: Methods

   .. autosummary::

      ~MinMaxNorm.__init__
      ~MinMaxNorm.add_module
      ~MinMaxNorm.apply
      ~MinMaxNorm.bfloat16
      ~MinMaxNorm.buffers
      ~MinMaxNorm.children
      ~MinMaxNorm.cpu
      ~MinMaxNorm.cuda
      ~MinMaxNorm.double
      ~MinMaxNorm.eval
      ~MinMaxNorm.extra_repr
      ~MinMaxNorm.float
      ~MinMaxNorm.forward
      ~MinMaxNorm.get_buffer
      ~MinMaxNorm.get_extra_state
      ~MinMaxNorm.get_parameter
      ~MinMaxNorm.get_submodule
      ~MinMaxNorm.half
      ~MinMaxNorm.ipu
      ~MinMaxNorm.load_state_dict
      ~MinMaxNorm.modules
      ~MinMaxNorm.named_buffers
      ~MinMaxNorm.named_children
      ~MinMaxNorm.named_modules
      ~MinMaxNorm.named_parameters
      ~MinMaxNorm.parameters
      ~MinMaxNorm.register_backward_hook
      ~MinMaxNorm.register_buffer
      ~MinMaxNorm.register_forward_hook
      ~MinMaxNorm.register_forward_pre_hook
      ~MinMaxNorm.register_full_backward_hook
      ~MinMaxNorm.register_load_state_dict_post_hook
      ~MinMaxNorm.register_module
      ~MinMaxNorm.register_parameter
      ~MinMaxNorm.requires_grad_
      ~MinMaxNorm.set_extra_state
      ~MinMaxNorm.share_memory
      ~MinMaxNorm.state_dict
      ~MinMaxNorm.to
      ~MinMaxNorm.to_empty
      ~MinMaxNorm.train
      ~MinMaxNorm.type
      ~MinMaxNorm.xpu
      ~MinMaxNorm.zero_grad

   .. rubric:: Attributes

   .. autosummary::

      ~MinMaxNorm.T_destination
      ~MinMaxNorm.dump_patches
      ~MinMaxNorm.training