## Activation Layers
| Module | MAdds | FLOPs | Parameters | Memory |
|---|---|---|---|---|
| torch.nn.ELU | x | ✓ | NA | ✓ |
| torch.nn.Hardshrink | x | x | NA | x |
| torch.nn.Hardsigmoid | x | x | NA | x |
| torch.nn.Hardtanh | x | x | NA | x |
| torch.nn.Hardswish | x | x | NA | x |
| torch.nn.LeakyReLU | x | ✓ | NA | ✓ |
| torch.nn.LogSigmoid | x | x | NA | x |
| torch.nn.PReLU | x | ✓ | NA | ✓ |
| torch.nn.ReLU | ✓ | ✓ | NA | ✓ |
| torch.nn.ReLU6 | ✓ | ✓ | NA | ✓ |
| torch.nn.RReLU | x | x | NA | x |
| torch.nn.SELU | x | x | NA | x |
| torch.nn.CELU | x | x | NA | x |
| torch.nn.GELU | x | x | NA | x |
| torch.nn.Sigmoid | x | x | NA | x |
| torch.nn.SiLU | x | x | NA | x |
| torch.nn.Mish | x | x | NA | x |
| torch.nn.Softplus | x | x | NA | x |
| torch.nn.Softshrink | x | x | NA | x |
| torch.nn.Softsign | x | x | NA | x |
| torch.nn.Tanh | x | x | NA | x |
| torch.nn.Tanhshrink | x | x | NA | x |
| torch.nn.Threshold | x | x | NA | x |
| torch.nn.GLU | x | x | NA | x |
| torch.nn.Softmin | x | x | NA | x |
| torch.nn.Softmax | ✓ | x | NA | x |
| torch.nn.Softmax2d | x | x | NA | x |
| torch.nn.LogSoftmax | x | x | NA | x |