XResNet.
XResNet model.
XResNet constructor.
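The snippets below assume the imports sketched here. ModelConstructor comes from the model_constructor package; the exact import path for the xresnet_stem helper is an assumption and may differ between versions.

import torch

from model_constructor import ModelConstructor
from model_constructor.xresnet import xresnet_stem  # assumed location of the stem helper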
xresnet = ModelConstructor(
    name='XResNet',
    make_stem=xresnet_stem,
    stem_sizes=[3, 32, 64, 64],
    act_fn=torch.nn.Mish,
)
xresnet
output
XResNet
  in_chans: 3, num_classes: 1000
  expansion: 1, groups: 1, dw: False, div_groups: None
  act_fn: Mish, sa: False, se: False
  stem sizes: [3, 32, 64, 64], stride on 0
  body sizes [64, 128, 256, 512]
  layers: [2, 2, 2, 2]
xresnet.print_changed_fields()
output
Changed fields:
name: XResNet
act_fn: Mish
stem_sizes: [3, 32, 64, 64]
make_stem: xresnet_stem
xresnet.stem
output
Sequential(
  (conv_0): ConvBnAct(
    (conv): Conv2d(3, 3, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
    (bn): BatchNorm2d(3, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (act_fn): Mish(inplace=True)
  )
  (conv_1): ConvBnAct(
    (conv): Conv2d(3, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
    (bn): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (act_fn): Mish(inplace=True)
  )
  (conv_2): ConvBnAct(
    (conv): Conv2d(32, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
    (bn): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (act_fn): Mish(inplace=True)
  )
  (conv_3): ConvBnAct(
    (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
    (bn): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (act_fn): Mish(inplace=True)
  )
  (stem_pool): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)
)
xresnet.body
output
Sequential(
  (l_0): Sequential(
    (bl_0): BasicBlock(
      (convs): Sequential(
        (conv_0): ConvBnAct(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
          (bn): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
          (act_fn): Mish(inplace=True)
        )
        (conv_1): ConvBnAct(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
          (bn): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        )
      )
      (act_fn): Mish(inplace=True)
    )
    (bl_1): BasicBlock(
      (convs): Sequential(
        (conv_0): ConvBnAct(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
          (bn): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
          (act_fn): Mish(inplace=True)
        )
        (conv_1): ConvBnAct(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
          (bn): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        )
      )
      (act_fn): Mish(inplace=True)
    )
  )
  (l_1): Sequential(
    (bl_0): BasicBlock(
      (convs): Sequential(
        (conv_0): ConvBnAct(
          (conv): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
          (bn): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
          (act_fn): Mish(inplace=True)
        )
        (conv_1): ConvBnAct(
          (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
          (bn): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        )
      )
      (id_conv): Sequential(
        (id_conv): ConvBnAct(
          (conv): Conv2d(64, 128, kernel_size=(1, 1), stride=(2, 2), bias=False)
          (bn): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        )
      )
      (act_fn): Mish(inplace=True)
    )
    (bl_1): BasicBlock(
      (convs): Sequential(
        (conv_0): ConvBnAct(
          (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
          (bn): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
          (act_fn): Mish(inplace=True)
        )
        (conv_1): ConvBnAct(
          (conv): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
          (bn): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        )
      )
      (act_fn): Mish(inplace=True)
    )
  )
  (l_2): Sequential(
    (bl_0): BasicBlock(
      (convs): Sequential(
        (conv_0): ConvBnAct(
          (conv): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
          (bn): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
          (act_fn): Mish(inplace=True)
        )
        (conv_1): ConvBnAct(
          (conv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
          (bn): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        )
      )
      (id_conv): Sequential(
        (id_conv): ConvBnAct(
          (conv): Conv2d(128, 256, kernel_size=(1, 1), stride=(2, 2), bias=False)
          (bn): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        )
      )
      (act_fn): Mish(inplace=True)
    )
    (bl_1): BasicBlock(
      (convs): Sequential(
        (conv_0): ConvBnAct(
          (conv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
          (bn): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
          (act_fn): Mish(inplace=True)
        )
        (conv_1): ConvBnAct(
          (conv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
          (bn): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        )
      )
      (act_fn): Mish(inplace=True)
    )
  )
  (l_3): Sequential(
    (bl_0): BasicBlock(
      (convs): Sequential(
        (conv_0): ConvBnAct(
          (conv): Conv2d(256, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
          (bn): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
          (act_fn): Mish(inplace=True)
        )
        (conv_1): ConvBnAct(
          (conv): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
          (bn): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        )
      )
      (id_conv): Sequential(
        (id_conv): ConvBnAct(
          (conv): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)
          (bn): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        )
      )
      (act_fn): Mish(inplace=True)
    )
    (bl_1): BasicBlock(
      (convs): Sequential(
        (conv_0): ConvBnAct(
          (conv): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
          (bn): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
          (act_fn): Mish(inplace=True)
        )
        (conv_1): ConvBnAct(
          (conv): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
          (bn): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        )
      )
      (act_fn): Mish(inplace=True)
    )
  )
)
xresnet.head
output
Sequential(
  (pool): AdaptiveAvgPool2d(output_size=1)
  (flat): Flatten(start_dim=1, end_dim=-1)
  (fc): Linear(in_features=512, out_features=1000, bias=True)
)
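To see how stem, body and head compose, here is a minimal shape-check sketch (shapes assume a 224x224 input):

import torch

x = torch.randn(1, 3, 224, 224)  # dummy batch of one ImageNet-sized image
x = xresnet.stem(x)   # two stride-2 reductions (conv_0 and stem_pool): [1, 64, 56, 56]
x = xresnet.body(x)   # three more stride-2 stages (l_1..l_3): [1, 512, 7, 7]
x = xresnet.head(x)   # pool, flatten, fc: [1, 1000]
print(x.shape)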
XResNet constructors
Let's create a constructor class for XResNet.
from functools import partial
from typing import Any, Callable, Optional

from torch import nn

# ModelCfg, ModSeq and xresnet_stem are provided by model_constructor.
class XResNet(ModelConstructor):
    make_stem: Callable[[ModelCfg], ModSeq] = xresnet_stem
    stem_sizes: list[int] = [32, 32, 64]
    pool: Optional[Callable[[Any], nn.Module]] = partial(
        nn.AvgPool2d, kernel_size=2, ceil_mode=True
    )
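The new class can be instantiated like the base constructor; a minimal sketch (the instance name is arbitrary):

xres_mc = XResNet(num_classes=10)
xres_mc  # repr shows stem sizes [32, 32, 64] and the default layers [2, 2, 2, 2]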
XResNet34 inherits from XResNet.
class XResNet34(XResNet):
    layers: list[int] = [3, 4, 6, 3]
XResNet50 inherits from XResNet34.
class XResNet50(XResNet34):
    # BottleneckBlock is the bottleneck residual block from model_constructor.
    block: type[nn.Module] = BottleneckBlock
    block_sizes: list[int] = [256, 512, 1024, 2048]
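Deeper variants follow the same pattern. For example, a hypothetical XResNet101 (shown only as an illustration, not defined above) would change just the layer counts:

class XResNet101(XResNet50):
    layers: list[int] = [3, 4, 23, 3]  # standard ResNet-101 layer layout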
Now we can create a constructor from the class and change model parameters during initialization or afterwards.
mc = XResNet34(num_classes=10)
mc
output
XResNet34
  in_chans: 3, num_classes: 10
  expansion: 1, groups: 1, dw: False, div_groups: None
  act_fn: ReLU, sa: False, se: False
  stem sizes: [32, 32, 64], stride on 0
  body sizes [64, 128, 256, 512]
  layers: [3, 4, 6, 3]
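Fields can also be changed on an existing constructor instance; a minimal sketch (LeakyReLU is used purely as an illustration):

from torch import nn

mc.act_fn = nn.LeakyReLU
mc.print_changed_fields()  # check which fields now differ from the defaults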
mc = XResNet50()
mc
output
XResNet50
  in_chans: 3, num_classes: 1000
  expansion: 1, groups: 1, dw: False, div_groups: None
  act_fn: ReLU, sa: False, se: False
  stem sizes: [32, 32, 64], stride on 0
  body sizes [256, 512, 1024, 2048]
  layers: [3, 4, 6, 3]
To create a model, call the model constructor object.
model = mc()
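A quick sanity check of the created model, as a sketch (assumes a 224x224 input; XResNet50 keeps the default 1000 classes):

import torch

out = model(torch.randn(1, 3, 224, 224))
print(out.shape)  # expected: torch.Size([1, 1000])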