1# Licensed to the Apache Software Foundation (ASF) under one
2# or more contributor license agreements.  See the NOTICE file
3# distributed with this work for additional information
4# regarding copyright ownership.  The ASF licenses this file
5# to you under the Apache License, Version 2.0 (the
6# "License"); you may not use this file except in compliance
7# with the License.  You may obtain a copy of the License at
8#
9#   http://www.apache.org/licenses/LICENSE-2.0
10#
11# Unless required by applicable law or agreed to in writing,
12# software distributed under the License is distributed on an
13# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14# KIND, either express or implied.  See the License for the
15# specific language governing permissions and limitations
16# under the License.
17
18# pylint: disable=invalid-name, line-too-long
19"""
20Port of MxNet version of Densenet to Relay.
21https://github.com/apache/incubator-mxnet/blob/master/python/mxnet/gluon/model_zoo/vision/densenet.py
22"""
23# pylint: enable=line-too-long
24from tvm import relay
25from . import layers
26from .init import create_workload
27
def _make_dense_layer(data, growth_rate, bn_size, index):
    """Single densenet layer: BN -> ReLU -> 1x1 conv -> BN -> ReLU -> 3x3 conv.

    Parameters
    ----------
    data : relay.Expr
        Input to the layer.
    growth_rate : int
        Number of output channels of the final 3x3 convolution.
    bn_size : int
        Bottleneck multiplier; the 1x1 conv emits bn_size * growth_rate channels.
    index : str
        Suffix used to build unique parameter names for this layer.

    Returns
    -------
    relay.Expr
        Output of the final 3x3 convolution.
    """
    bn1 = layers.batch_norm_infer(data, name="batch_1_%s" % index)
    relu1 = relay.nn.relu(bn1)
    conv1 = layers.conv2d(relu1, channels=bn_size * growth_rate,
                          kernel_size=(1, 1), name="conv2d_1_%s" % index)
    # Use %-formatting here for consistency with every other layer name; the
    # previous `"batch_2_" + index` only worked because index is always a str.
    bn2 = layers.batch_norm_infer(conv1, name="batch_2_%s" % index)
    relu2 = relay.nn.relu(bn2)
    conv2 = layers.conv2d(relu2, channels=growth_rate, kernel_size=(3, 3),
                          padding=(1, 1), name="conv2d_2_%s" % index)
    return conv2
39
def _make_dense_block(data, num_layers, bn_size, growth_rate, index):
    """Chain ``num_layers`` dense layers, feeding each one's output to the next.

    ``index`` prefixes every layer's name so parameter names stay unique
    across blocks.
    """
    out = data
    for i in range(num_layers):
        layer_name = "%s_%s" % (index, i)
        out = _make_dense_layer(out, growth_rate, bn_size, layer_name)
    return out
47
def _make_transition(data, num_output_features, index):
    """Transition between dense blocks: BN -> ReLU -> 1x1 conv -> 2x2 avg-pool."""
    activated = relay.nn.relu(
        layers.batch_norm_infer(data, name="batch_t_%s" % index))
    squeezed = layers.conv2d(activated, channels=num_output_features,
                             kernel_size=(1, 1), name="conv_t_%s" % index)
    # Halve the spatial resolution between blocks.
    return relay.nn.avg_pool2d(squeezed, pool_size=(2, 2), strides=(2, 2))
55
def _make_dense_net(num_init_features, growth_rate, block_config,
                    data_shape, data_dtype, bn_size=4, classes=1000):
    """Builds up a densenet.

    Parameters
    ----------
    num_init_features : int
        Channels of the initial 7x7 stem convolution.
    growth_rate : int
        Channels each dense layer adds to the running feature count.
    block_config : list of int
        Number of dense layers in each dense block.
    data_shape : tuple of int
        Full input shape, e.g. (batch, 3, 224, 224).
    data_dtype : str
        Data type of the input tensor.
    bn_size : int, optional (default 4)
        Bottleneck multiplier inside each dense layer.
    classes : int, optional (default 1000)
        Number of output classes.

    Returns
    -------
    relay.Function
        The constructed network as a Relay function.
    """
    data = relay.Var("data", relay.TensorType(data_shape, data_dtype))
    conv1 = layers.conv2d(data, channels=num_init_features,
                          kernel_size=(7, 7), strides=(2, 2), padding=(3, 3),
                          name='conv1')
    bn1 = layers.batch_norm_infer(conv1, name='batch1')
    relu1 = relay.nn.relu(bn1)
    mp = relay.nn.max_pool2d(relu1, pool_size=(3, 3), strides=(2, 2), padding=(1, 1))

    num_features = num_init_features
    layer_out = mp
    for i, num_layers in enumerate(block_config):
        # BUGFIX: _make_dense_block's signature is
        # (data, num_layers, bn_size, growth_rate, index); the previous call
        # passed growth_rate and bn_size swapped, so every dense layer's
        # bottleneck width and growth were wrong.
        layer_out = _make_dense_block(layer_out, num_layers, bn_size,
                                      growth_rate, i)
        num_features = num_features + num_layers*growth_rate
        # No transition after the final dense block.
        if i != len(block_config) - 1:
            layer_out = _make_transition(layer_out, num_features // 2, i)
            num_features = num_features // 2
    bn2 = layers.batch_norm_infer(layer_out, name='batch2')
    relu2 = relay.nn.relu(bn2)
    avg = relay.nn.avg_pool2d(relu2, pool_size=(7, 7))
    flat = relay.nn.batch_flatten(avg)

    ret = layers.dense_add_bias(flat, units=classes, name='dense')

    return relay.Function(relay.analysis.free_vars(ret), ret)
83
def get_workload(densenet_size=121, classes=1000, batch_size=4,
                 image_shape=(3, 224, 224), dtype='float32'):
    """Gets benchmark workload for densenet.

    Parameters
    ----------
    densenet_size : int, optional (default 121)
        Parameter for the network size. The supported sizes
        are 121, 161, 169, and 201.

    classes : int, optional (default 1000)
        The number of classes.

    batch_size : int, optional (default 4)
        The batch size for the network.

    image_shape : shape, optional (default (3, 224, 224))
        The shape of the input data.

    dtype : data type, optional (default 'float32')
        The data type of the input data.

    Returns
    -------
    mod: tvm.relay.Module
        The relay module that contains a DenseNet network.

    params : dict of str to NDArray
        The benchmark parameters.
    """
    # (num_init_features, growth_rate, block_config) per the standard
    # DenseNet configurations.
    # BUGFIX: densenet-169 uses 64 initial features (was mistyped as 69).
    specs = {121: (64, 32, [6, 12, 24, 16]),
             161: (96, 48, [6, 12, 36, 24]),
             169: (64, 32, [6, 12, 32, 32]),
             201: (64, 32, [6, 12, 48, 32])}

    num_init_features, growth_rate, block_config = specs[densenet_size]
    data_shape = tuple([batch_size] + list(image_shape))
    # BUGFIX: the previous call passed batch_size into the bn_size positional
    # slot of _make_dense_net; bn_size is the bottleneck multiplier (4) and is
    # unrelated to the batch size.
    net = _make_dense_net(num_init_features, growth_rate, block_config,
                          data_shape, dtype, classes=classes)
    return create_workload(net)
124