Blocks.
Base blocks for ResNet: BasicBlock and BottleneckBlock.
BasicBlock
from functools import partial

from torch import nn

from model_constructor.blocks import BasicBlock, BottleneckBlock

block = BasicBlock(64, 64)
block
output
BasicBlock(
  (convs): Sequential(
    (conv_0): ConvBnAct(
      (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (act_fn): ReLU(inplace=True)
    )
    (conv_1): ConvBnAct(
      (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    )
  )
  (act_fn): ReLU(inplace=True)
)
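A quick sanity check (a sketch, not part of the block definition): pass a random batch through the block created above. With stride 1 and equal in/out channels the shape is unchanged, so the residual sum needs no projection.

import torch

x = torch.randn(2, 64, 32, 32)
out = block(x)  # convs(x) plus the identity, followed by the final act_fn
out.shape  # expected: torch.Size([2, 64, 32, 32])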
BottleneckBlock
block = BottleneckBlock(64, 64, dw=True)
block
output
BottleneckBlock(
  (convs): Sequential(
    (conv_0): ConvBnAct(
      (conv): Conv2d(64, 16, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (act_fn): ReLU(inplace=True)
    )
    (conv_1): ConvBnAct(
      (conv): Conv2d(16, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=16, bias=False)
      (bn): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (act_fn): ReLU(inplace=True)
    )
    (conv_2): ConvBnAct(
      (conv): Conv2d(16, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    )
  )
  (act_fn): ReLU(inplace=True)
)
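With dw=True the 3x3 conv becomes depthwise (groups equal to the mid channels), which sharply cuts its weight count. A small comparison, reusing the block above; the plain Conv2d here is created only for reference:

dw_conv = block.convs.conv_1.conv  # Conv2d(16, 16, 3x3, groups=16) from the block above
full_conv = nn.Conv2d(16, 16, 3, padding=1, bias=False)  # dense 3x3 over all 16 channels, for comparison

dw_conv.weight.numel(), full_conv.weight.numel()  # expected: (144, 2304)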
block = BottleneckBlock(64, 64, groups=4)
block
output
BottleneckBlock(
  (convs): Sequential(
    (conv_0): ConvBnAct(
      (conv): Conv2d(64, 16, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (act_fn): ReLU(inplace=True)
    )
    (conv_1): ConvBnAct(
      (conv): Conv2d(16, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=4, bias=False)
      (bn): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (act_fn): ReLU(inplace=True)
    )
    (conv_2): ConvBnAct(
      (conv): Conv2d(16, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    )
  )
  (act_fn): ReLU(inplace=True)
)
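With groups=4 the 16 mid channels are split into 4 groups of 4, which shows up in the weight shape of the grouped conv. A quick check on the block above:

block.convs.conv_1.conv.weight.shape  # expected: torch.Size([16, 4, 3, 3]) - 16 filters, each over 16 / 4 = 4 channels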
block = BottleneckBlock(64, 64, expansion=2, act_fn=nn.LeakyReLU, bn_1st=False)
block
output
BottleneckBlock(
  (convs): Sequential(
    (conv_0): ConvBnAct(
      (conv): Conv2d(64, 32, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (act_fn): LeakyReLU(negative_slope=0.01, inplace=True)
      (bn): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    )
    (conv_1): ConvBnAct(
      (conv): Conv2d(32, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (act_fn): LeakyReLU(negative_slope=0.01, inplace=True)
      (bn): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    )
    (conv_2): ConvBnAct(
      (conv): Conv2d(32, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    )
  )
  (act_fn): LeakyReLU(negative_slope=0.01, inplace=True)
)
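bn_1st=False places the activation before BatchNorm inside each ConvBnAct, as the repr above shows. One way to see the ordering programmatically:

[name for name, _ in block.convs.conv_0.named_children()]  # expected: ['conv', 'act_fn', 'bn']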
block = BottleneckBlock(32, 64, expansion=2, dw=True)
block
output
BottleneckBlock(
  (convs): Sequential(
    (conv_0): ConvBnAct(
      (conv): Conv2d(32, 32, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (act_fn): ReLU(inplace=True)
    )
    (conv_1): ConvBnAct(
      (conv): Conv2d(32, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=32, bias=False)
      (bn): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (act_fn): ReLU(inplace=True)
    )
    (conv_2): ConvBnAct(
      (conv): Conv2d(32, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    )
  )
  (id_conv): Sequential(
    (id_conv): ConvBnAct(
      (conv): Conv2d(32, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    )
  )
  (act_fn): ReLU(inplace=True)
)
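Because in_channels (32) and out_channels (64) differ, the shortcut gets a 1x1 projection. A short shape check (a sketch, reusing the block above):

import torch

getattr(block, "id_conv", None) is not None  # expected: True - the identity path needs a 1x1 projection
block(torch.randn(2, 32, 16, 16)).shape      # expected: torch.Size([2, 64, 16, 16]) - channels change, spatial size kept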
pool = partial(nn.AvgPool2d, kernel_size=2, ceil_mode=True)
block = BottleneckBlock(32, 64, stride=2, dw=True, pool=pool)
block
output
BottleneckBlock(
  (convs): Sequential(
    (conv_0): ConvBnAct(
      (conv): Conv2d(32, 16, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (act_fn): ReLU(inplace=True)
    )
    (conv_1): ConvBnAct(
      (conv): Conv2d(16, 16, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=16, bias=False)
      (bn): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (act_fn): ReLU(inplace=True)
    )
    (conv_2): ConvBnAct(
      (conv): Conv2d(16, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    )
  )
  (id_conv): Sequential(
    (pool): AvgPool2d(kernel_size=2, stride=2, padding=0)
    (id_conv): ConvBnAct(
      (conv): Conv2d(32, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    )
  )
  (act_fn): ReLU(inplace=True)
)
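With stride=2 the middle conv downsamples and the AvgPool2d on the shortcut matches it, so the spatial size is halved while channels grow from 32 to 64. A quick forward pass (sketch, reusing the block above):

import torch

block(torch.randn(2, 32, 16, 16)).shape  # expected: torch.Size([2, 64, 8, 8])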
from model_constructor.layers import SEModule, SimpleSelfAttention
block = BottleneckBlock(32, 64, stride=2, dw=True, pool=pool, se=SEModule)
block
output
BottleneckBlock(
  (convs): Sequential(
    (conv_0): ConvBnAct(
      (conv): Conv2d(32, 16, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (act_fn): ReLU(inplace=True)
    )
    (conv_1): ConvBnAct(
      (conv): Conv2d(16, 16, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=16, bias=False)
      (bn): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (act_fn): ReLU(inplace=True)
    )
    (conv_2): ConvBnAct(
      (conv): Conv2d(16, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    )
    (se): SEModule(
      (squeeze): AdaptiveAvgPool2d(output_size=1)
      (excitation): Sequential(
        (reduce): Linear(in_features=64, out_features=4, bias=True)
        (se_act): ReLU(inplace=True)
        (expand): Linear(in_features=4, out_features=64, bias=True)
        (se_gate): Sigmoid()
      )
    )
  )
  (id_conv): Sequential(
    (pool): AvgPool2d(kernel_size=2, stride=2, padding=0)
    (id_conv): ConvBnAct(
      (conv): Conv2d(32, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    )
  )
  (act_fn): ReLU(inplace=True)
)
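SEModule is appended as the last layer of convs, so channel re-weighting happens after conv_2 and before the residual add. The ordering can be checked directly on the block above:

[name for name, _ in block.convs.named_children()]  # expected: ['conv_0', 'conv_1', 'conv_2', 'se']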
block = BottleneckBlock(32, 64, stride=2, dw=True, pool=pool, se=SEModule, sa=SimpleSelfAttention)
block
output
BottleneckBlock(
  (convs): Sequential(
    (conv_0): ConvBnAct(
      (conv): Conv2d(32, 16, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (act_fn): ReLU(inplace=True)
    )
    (conv_1): ConvBnAct(
      (conv): Conv2d(16, 16, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=16, bias=False)
      (bn): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (act_fn): ReLU(inplace=True)
    )
    (conv_2): ConvBnAct(
      (conv): Conv2d(16, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    )
    (se): SEModule(
      (squeeze): AdaptiveAvgPool2d(output_size=1)
      (excitation): Sequential(
        (reduce): Linear(in_features=64, out_features=4, bias=True)
        (se_act): ReLU(inplace=True)
        (expand): Linear(in_features=4, out_features=64, bias=True)
        (se_gate): Sigmoid()
      )
    )
    (sa): SimpleSelfAttention(
      (conv): Conv1d(64, 64, kernel_size=(1,), stride=(1,), bias=False)
    )
  )
  (id_conv): Sequential(
    (pool): AvgPool2d(kernel_size=2, stride=2, padding=0)
    (id_conv): ConvBnAct(
      (conv): Conv2d(32, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    )
  )
  (act_fn): ReLU(inplace=True)
)
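Both SEModule and SimpleSelfAttention only re-weight features, so the output shape matches the plain stride-2 block. A final sanity check (sketch, reusing the block above):

import torch

block(torch.randn(2, 32, 16, 16)).shape  # expected: torch.Size([2, 64, 8, 8]) - same as without se / sa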