Mila
Deep Neural Network Library
Dnn.Modules.LayerNorm Module Reference

Exported Modules

module  Compute.MemoryResource
 
module  Compute.UnaryOperation
 
module  Serialization.ModelArchive
 
module  Compute.ComputeDevice
 
module  Compute.CudaMemoryResource
 
module  Compute.DeviceContext
 
module  Compute.OperationAttributes
 
module  Dnn.Tensor
 
module  Dnn.TensorTraits
 
module  Compute.OperationRegistry
 
module  Compute.Precision
 
module  Dnn.Module
 
module  Compute.DeviceType
 
module  Compute.CpuMemoryResource
 
module  Compute.OperationBase
 

Classes

class  Mila::Dnn::LayerNorm< TDeviceType, TInput, TOutput >
 Layer Normalization module.
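
 For reference, the computation is standard layer normalization (Ba et al., 2016); the symbols below are the conventional ones, with γ and β corresponding to the weight and bias tensors exposed by getWeight() and getBias() below, and the placement of ε being the usual convention rather than something stated on this page:

     \[ y = \gamma \cdot \frac{x - \mathrm{E}[x]}{\sqrt{\mathrm{Var}[x] + \epsilon}} + \beta \]

 The mean_ and rstd_ member tensors (see Variables) hold E[x] and the reciprocal of the denominator, respectively.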
 
class  Mila::Dnn::LayerNormConfig
 Configuration class for the Layer Normalization module.
 

Typedefs

template<typename TInput = float, typename TOutput = TInput>
using Mila::Dnn::CpuLayerNorm = LayerNorm< DeviceType::Cpu, TInput, TOutput >
 Type alias for CPU-based layer normalization module with customizable tensor types.
 
template<typename TInput = float, typename TOutput = TInput>
using Mila::Dnn::CudaLayerNorm = LayerNorm< DeviceType::Cuda, TInput, TOutput >
 Type alias for CUDA-based layer normalization module with customizable tensor types.
 
using ModuleBase = Module< TDeviceType, TInput, TOutput >
 Alias for base module type.
 
using MR = std::conditional_t< TDeviceType==DeviceType::Cuda, CudaMemoryResource, CpuMemoryResource >
 Memory resource type used for tensors, selected based on device type.
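
 A minimal sketch of what these aliases expand to, using only the declarations above (the qualification of DeviceType under a Compute namespace is inferred from the module list and is an assumption):

     #include <type_traits>
     using namespace Mila::Dnn;

     // CpuLayerNorm<float> is exactly LayerNorm<DeviceType::Cpu, float, float>,
     // and through the MR alias its tensors use CpuMemoryResource; the CUDA
     // alias selects CudaMemoryResource the same way.
     static_assert( std::is_same_v<
         CpuLayerNorm<float>,
         LayerNorm<Compute::DeviceType::Cpu, float, float> > );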
 

Functions

 LayerNorm (const std::string &device_name, const LayerNormConfig &config)
 Constructs a new LayerNorm module with a device name.
 
 LayerNorm (std::shared_ptr< DeviceContext > device_context, const LayerNormConfig &config)
 Constructs a new LayerNorm module with a provided device context.
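
 Both overloads sketched below, using the CpuLayerNorm alias from above; the device name string "CPU" is an assumption, and DeviceContext creation is elided because its constructors are documented in Compute.DeviceContext, not here:

     using namespace Mila::Dnn;

     LayerNormConfig config;                     // configured as needed; see LayerNormConfig
     CpuLayerNorm<float> norm( "CPU", config );  // overload 1: device resolved by name

     // Overload 2: supply an existing context instead of a name.
     // std::shared_ptr<Compute::DeviceContext> ctx = /* obtain a context */;
     // CpuLayerNorm<float> norm2( ctx, config );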
 
void backward (const Tensor< TInput, MR > &input, const Tensor< TOutput, MR > &output_grad, Tensor< TInput, MR > &input_grad)
 Performs the backward pass of the Layer Normalization operation.
 
void createOperation ()
 Creates the appropriate Layer Normalization operation based on the current device context.
 
void forward (const Tensor< TInput, MR > &input, Tensor< TOutput, MR > &output)
 Performs the forward pass of the Layer Normalization operation.
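
 A call-pattern sketch for training; tensor construction is elided because Tensor's constructors are not listed on this page, and shapes are assumed to match as the signatures require:

     // For CpuLayerNorm<float>, MR resolves to CpuMemoryResource, so the
     // tensors below would be Tensor<float, Compute::CpuMemoryResource>,
     // shaped and filled elsewhere.

     // Forward: writes the normalized, scaled, and shifted result into output.
     norm.forward( input, output );

     // Backward: given dL/d(output) in output_grad, writes dL/d(input)
     // into input_grad (plausibly reusing the cached mean_/rstd_ statistics).
     norm.backward( input, output_grad, input_grad );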
 
std::shared_ptr< Tensor< TInput, MR > > getBias ()
 Gets the bias tensor used after normalization and scaling.
 
std::shared_ptr< Tensor< TInput, MR > > getWeight ()
 Gets the weight tensor used for scaling after normalization.
 
bool hasBias () const
 Returns whether the module has a bias tensor.
 
void initializeTensors ()
 Initializes the tensors needed for the Layer Normalization operation.
 
void load (ModelArchive &archive) override
 Deserializes the module state from a ZIP archive.
 
size_t parameterCount () const override
 Gets the number of trainable parameters in this module.
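
 As a worked example (assuming the elementwise affine parameterization implied by the weight and bias members, over a single normalized feature dimension): with a feature size of 768, the module holds 768 weight values plus 768 bias values, so parameterCount() returns 1536, or 768 when hasBias() is false.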
 
void save (ModelArchive &archive) const override
 Serializes the module state to a ZIP archive.
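
 A round-trip sketch; ModelArchive construction is not documented on this page, so the archive below is a placeholder, and restored stands for a second module built with an equivalent LayerNormConfig:

     // ModelArchive archive = /* open a ZIP-backed archive; see Serialization.ModelArchive */;
     norm.save( archive );       // write this module's parameters and state

     restored.load( archive );   // rehydrate the same state into another instance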
 
std::string toString () const override
 Generates a string representation of this module's configuration.
 

Variables

std::shared_ptr< Tensor< TOutput, MR > > bias_ { nullptr }
 The bias tensor added after normalization and scaling.
 
LayerNormConfig config_
 Configuration for the LayerNorm module.
 
std::shared_ptr< Tensor< TOutput, MR > > mean_ { nullptr }
 The mean tensor used for normalization.
 
std::shared_ptr< UnaryOperation< TDeviceType, TInput, TOutput > > operation_ { nullptr }
 The underlying operation that implements Layer Normalization.
 
std::vector< std::shared_ptr< Tensor< TOutput, MR > > > output_state_
 Collection of output state tensors for caching.
 
std::vector< std::shared_ptr< Tensor< TOutput, MR > > > parameters_
 Collection of trainable parameters for this module.
 
OperationAttributes properties_
 Operation attributes and configuration.
 
std::shared_ptr< Tensor< TOutput, MR > > rstd_ { nullptr }
 The reciprocal standard deviation tensor (1/σ) computed during normalization.
 
std::shared_ptr< Tensor< TOutput, MR > > weight_ { nullptr }
 The weight tensor for scaling after normalization.
 

Files

file  Mila/Src/Dnn/Modules/Normalization/LayerNorm.ixx
 Implementation of Layer Normalization module for neural networks.
 
file  Mila/Src/Dnn/Modules/Normalization/LayerNormConfig.ixx
 Configuration interface for the Layer Normalization module in the Mila DNN framework.