1# Licensed to the Apache Software Foundation (ASF) under one
2# or more contributor license agreements.  See the NOTICE file
3# distributed with this work for additional information
4# regarding copyright ownership.  The ASF licenses this file
5# to you under the Apache License, Version 2.0 (the
6# "License"); you may not use this file except in compliance
7# with the License.  You may obtain a copy of the License at
8#
9#   http://www.apache.org/licenses/LICENSE-2.0
10#
11# Unless required by applicable law or agreed to in writing,
12# software distributed under the License is distributed on an
13# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14# KIND, either express or implied.  See the License for the
15# specific language governing permissions and limitations
16# under the License.
17
18package AI::MXNet::Gluon::ModelZoo::Vision::DenseNet;
19use strict;
20use warnings;
21use AI::MXNet::Base;
22use AI::MXNet::Function::Parameters;
23use AI::MXNet::Gluon::Mouse;
24extends 'AI::MXNet::Gluon::HybridBlock';
25
func _make_dense_block($num_layers, $bn_size, $growth_rate, $dropout, $stage_index)
{
    # A dense block is a stack of $num_layers identical dense layers
    # (see _make_dense_layer); each layer concatenates its input with
    # the features it produces.
    my $block = nn->HybridSequential(prefix=>"stage${stage_index}_");
    $block->name_scope(sub {
        $block->add(_make_dense_layer($growth_rate, $bn_size, $dropout))
            for 1 .. $num_layers;
    });
    return $block;
}
37
func _make_dense_layer($growth_rate, $bn_size, $dropout)
{
    # One DenseNet-BC layer:
    #   bottleneck: BN -> ReLU -> 1x1 conv (bn_size * growth_rate channels)
    #   composite:  BN -> ReLU -> 3x3 conv (growth_rate channels)
    # optionally followed by dropout.
    my $features = nn->HybridSequential(prefix=>'');
    $features->add(nn->BatchNorm());
    $features->add(nn->Activation('relu'));
    $features->add(nn->Conv2D($bn_size * $growth_rate, kernel_size=>1, use_bias=>0));
    $features->add(nn->BatchNorm());
    $features->add(nn->Activation('relu'));
    $features->add(nn->Conv2D($growth_rate, kernel_size=>3, padding=>1, use_bias=>0));
    $features->add(nn->Dropout($dropout)) if $dropout;

    # Dense connectivity: concatenate the layer input (Identity branch)
    # with the newly computed features along the channel axis.
    my $layer = nn->HybridConcurrent(axis=>1, prefix=>'');
    $layer->add(nn->Identity());
    $layer->add($features);
    return $layer;
}
58
func _make_transition($num_output_features)
{
    # Transition between dense blocks: compress the channel count with a
    # 1x1 convolution, then halve the spatial size with 2x2 average pooling.
    my $transition = nn->HybridSequential(prefix=>'');
    $transition->add($_) for (
        nn->BatchNorm(),
        nn->Activation('relu'),
        nn->Conv2D($num_output_features, kernel_size=>1, use_bias=>0),
        nn->AvgPool2D(pool_size=>2, strides=>2),
    );
    return $transition;
}
68
69=head1 NAME
70
71    AI::MXNet::Gluon::ModelZoo::Vision::DenseNet - Densenet-BC model from the "Densely Connected Convolutional Networks"
72=cut
73
74=head1 DESCRIPTION
75
76    Densenet-BC model from the "Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf> paper.
77
78    Parameters
79    ----------
80    num_init_features : Int
81        Number of filters to learn in the first convolution layer.
82    growth_rate : Int
83        Number of filters to add each layer (`k` in the paper).
84    block_config : array ref of Int
85        List of integers for numbers of layers in each pooling block.
    bn_size : Int, default 4
        Multiplicative factor for the number of bottleneck layers.
        (i.e. bn_size * k features in the bottleneck layer)
89    dropout : float, default 0
90        Rate of dropout after each dense layer.
91    classes : int, default 1000
92        Number of classification classes.
93=cut
# Constructor attributes; see the DESCRIPTION POD above for semantics.
has [qw/num_init_features
        growth_rate/] => (is => 'ro', isa => 'Int', required => 1);
has 'block_config'    => (is => 'ro', isa => 'ArrayRef[Int]', required => 1);
has 'bn_size'         => (is => 'ro', isa => 'Int', default => 4);
has 'dropout'         => (is => 'ro', isa => 'Num', default => 0);
has 'classes'         => (is => 'ro', isa => 'Int', default => 1000);
# Order in which positional arguments to new() map onto the attributes above.
method python_constructor_arguments(){ [qw/num_init_features growth_rate block_config bn_size dropout classes/] }
101
sub BUILD
{
    # Assemble the DenseNet body (stem, dense blocks, transitions, head)
    # inside this block's name scope.
    my $self = shift;
    $self->name_scope(sub {
        $self->features(nn->HybridSequential(prefix=>''));
        my $features = $self->features;

        # Stem: 7x7 stride-2 convolution -> BN -> ReLU -> 3x3 stride-2 max pool.
        $features->add(
            nn->Conv2D(
                $self->num_init_features, kernel_size=>7,
                strides=>2, padding=>3, use_bias=>0
            )
        );
        $features->add(nn->BatchNorm());
        $features->add(nn->Activation('relu'));
        $features->add(nn->MaxPool2D(pool_size=>3, strides=>2, padding=>1));

        # Dense blocks; every block but the last is followed by a
        # transition that halves the running channel count.
        my $channels   = $self->num_init_features;
        my $num_stages = scalar @{ $self->block_config };
        my $stage      = 0;
        for my $num_layers (@{ $self->block_config })
        {
            $features->add(_make_dense_block($num_layers, $self->bn_size, $self->growth_rate, $self->dropout, $stage + 1));
            # Each of the $num_layers layers contributes growth_rate channels.
            $channels += $num_layers * $self->growth_rate;
            if($stage != $num_stages - 1)
            {
                # DenseNet-BC compression: transition outputs half the channels.
                $channels = int($channels / 2);
                $features->add(_make_transition($channels));
            }
            $stage++;
        }

        # Head: final BN-ReLU, global 7x7 average pool, flatten, linear classifier.
        $features->add(nn->BatchNorm());
        $features->add(nn->Activation('relu'));
        $features->add(nn->AvgPool2D(pool_size=>7));
        $features->add(nn->Flatten());

        $self->output(nn->Dense($self->classes));
    });
}
137
method hybrid_forward(GluonClass $F, GluonInput $x)
{
    # Run the feature extractor, then the linear classifier head.
    return $self->output->($self->features->($x));
}
144
package AI::MXNet::Gluon::ModelZoo::Vision;

# Architecture spec per supported DenseNet depth:
#   depth => [ num_init_features, growth_rate, block_config ]
my %densenet_spec = (
    121 => [64, 32, [6, 12, 24, 16]],
    161 => [96, 48, [6, 12, 36, 24]],
    169 => [64, 32, [6, 12, 32, 32]],
    201 => [64, 32, [6, 12, 48, 32]]
);
153
154=head2 get_densenet
155
156    Densenet-BC model from the
157    "Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf> paper.
158
    Parameters
    ----------
    $num_layers : Int
        Number of layers for the variant of densenet. Options are 121, 161, 169, 201.
    :$pretrained : Bool, default 0
        Whether to load the pretrained weights for model.
    :$ctx : AI::MXNet::Context, default CPU
        The context in which to load the pretrained weights.
    :$root : Str, default '~/.mxnet/models'
        Location for keeping the model parameters.
    :$bn_size : Int, default 4
        Multiplicative factor for the number of bottleneck layers.
        (i.e. bn_size * k features in the bottleneck layer)
    :$dropout : Num, default 0
        Rate of dropout after each dense layer.
    :$classes : Int, default 1000
        Number of classification classes.
169=cut
170
method get_densenet(
    Int $num_layers, Bool :$pretrained=0, :$ctx=AI::MXNet::Context->cpu(),
    :$root='~/.mxnet/models',
    Int :$bn_size=4,
    Num :$dropout=0,
    Int :$classes=1000
)
{
    # Construct a DenseNet-BC of the requested depth; optionally load
    # pretrained weights from the model store into $ctx.
    #
    # Fail early with a clear message for an unsupported depth; otherwise
    # the spec lookup below dies with an opaque
    # "Can't use an undefined value as an ARRAY reference".
    if(not exists $densenet_spec{$num_layers})
    {
        require Carp;
        Carp::confess(
            "invalid number of layers [$num_layers]; valid values: ["
            .join(', ', sort { $a <=> $b } keys %densenet_spec)."]"
        );
    }
    my ($num_init_features, $growth_rate, $block_config) = @{ $densenet_spec{$num_layers} };
    my $net = AI::MXNet::Gluon::ModelZoo::Vision::DenseNet->new(
        $num_init_features, $growth_rate, $block_config,
        $bn_size, $dropout, $classes
    );
    if($pretrained)
    {
        $net->load_parameters(
            AI::MXNet::Gluon::ModelZoo::ModelStore->get_model_file(
                "densenet$num_layers",
                root=>$root
            ),
            ctx=>$ctx
        );
    }
    return $net;
}
196
197=head2 densenet121
198
199    Densenet-BC 121-layer model from the
200    "Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf> paper.
201
202    Parameters
203    ----------
204    :$pretrained : Bool, default 0
205        Whether to load the pretrained weights for model.
206    :$ctx : AI::MXNet::Context, default CPU
207        The context in which to load the pretrained weights.
208    :$root : Str, default '~/.mxnet/models'
209        Location for keeping the model parameters.
210=cut
211
method densenet121(%kwargs)
{
    # Convenience wrapper: 121-layer DenseNet-BC.
    return __PACKAGE__->get_densenet(121, %kwargs);
}
216
217=head2 densenet161
218
219    Densenet-BC 161-layer model from the
220    "Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf> paper.
221
222    Parameters
223    ----------
224    :$pretrained : Bool, default 0
225        Whether to load the pretrained weights for model.
226    :$ctx : AI::MXNet::Context, default CPU
227        The context in which to load the pretrained weights.
228    :$root : Str, default '~/.mxnet/models'
229        Location for keeping the model parameters.
230=cut
231
method densenet161(%kwargs)
{
    # Convenience wrapper: 161-layer DenseNet-BC.
    return __PACKAGE__->get_densenet(161, %kwargs);
}
236
237=head2 densenet169
238
239    Densenet-BC 169-layer model from the
240    "Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf> paper.
241
242    Parameters
243    ----------
244    :$pretrained : Bool, default 0
245        Whether to load the pretrained weights for model.
246    :$ctx : AI::MXNet::Context, default CPU
247        The context in which to load the pretrained weights.
248    :$root : Str, default '~/.mxnet/models'
249        Location for keeping the model parameters.
250=cut
251
method densenet169(%kwargs)
{
    # Convenience wrapper: 169-layer DenseNet-BC.
    return __PACKAGE__->get_densenet(169, %kwargs);
}
256
257=head2 densenet201
258
259    Densenet-BC 201-layer model from the
260    "Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf> paper.
261
262    Parameters
263    ----------
264    :$pretrained : Bool, default 0
265        Whether to load the pretrained weights for model.
266    :$ctx : AI::MXNet::Context, default CPU
267        The context in which to load the pretrained weights.
268    :$root : Str, default '~/.mxnet/models'
269        Location for keeping the model parameters.
270=cut
271
method densenet201(%kwargs)
{
    # Convenience wrapper: 201-layer DenseNet-BC.
    return __PACKAGE__->get_densenet(201, %kwargs);
}
276
2771;
278