# Copyright (c) 2020  PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys
import numbers
import numpy as np
from x2paddle.core.util import *
from x2paddle.core.program import PaddleGraph
from x2paddle.decoder.caffe_decoder import CaffeGraphNode


def _adjust_parameters(node):
    data = node.data
    # When using the protobuf-backend, each parameter initially has four dimensions.
    # In certain cases (like FC layers), we want to eliminate the singleton dimensions.
    # This implementation takes care of the common cases. However, it does leave the
    # potential for future issues.
    # The Caffe-backend does not suffer from this problem.
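    # For example, an InnerProduct weight stored as (1, 1, out_dim, in_dim)
    # is squeezed to (out_dim, in_dim), and a bias stored as (1, 1, 1, out_dim)
    # becomes (out_dim,).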
    data = list(data)

    squeeze_indices = [1]  # Squeeze biases.
    if node.layer_type == 'InnerProduct':
        squeeze_indices.append(0)  # Squeeze FC.

    for idx in squeeze_indices:
        if idx >= len(data):
            continue

        d = data[idx]
        assert len(
            d.shape
        ) == 4, 'invalid shape[%s] from caffe when adjust_parameters' % (
            str(d.shape))

        shape_old = d.shape
        sq_axis = None
        if idx == 0:
            sq_axis = (0, 1)
        elif idx == 1:
            sq_axis = (0, 1, 2)
        else:
            continue

        data[idx] = np.squeeze(d, axis=sq_axis)
        shape_new = data[idx].shape
    return data

def _get_kernel_parameters(kind, params):
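    """Resolve the kernel geometry of a Convolution/Pooling/Deconvolution/
    ConvolutionDepthwise parameter message.

    Returns a (num_output, kernel, stride, pad, dilation, group) tuple,
    preferring the explicit kernel_h/kernel_w (and stride_h/stride_w,
    pad_h/pad_w) fields over the repeated kernel_size/stride/pad fields.
    """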
    assert kind in [
        "Convolution", "Pooling", "Deconvolution", "ConvolutionDepthwise"
    ]
    [k_h, k_w] = [1, 1]
    if params.kernel_h > 0 or params.kernel_w > 0:
        k_h = params.kernel_h
        k_w = params.kernel_w
    elif isinstance(params.kernel_size, numbers.Number):
        [k_h, k_w] = [params.kernel_size] * 2
    elif len(params.kernel_size) > 0:
        k_h = params.kernel_h if params.kernel_h > 0 else params.kernel_size[0]
        k_w = params.kernel_w if params.kernel_w > 0 else params.kernel_size[
            len(params.kernel_size) - 1]
    [s_h, s_w] = [1, 1]
    if isinstance(params.stride, numbers.Number):
        [s_h, s_w] = [params.stride] * 2
    elif len(params.stride) > 0:
        s_h = params.stride_h if params.stride_h > 0 else params.stride[0]
        s_w = params.stride_w if params.stride_w > 0 else params.stride[len(
            params.stride) - 1]
    elif params.stride_h > 0 or params.stride_w > 0:
        s_h = params.stride_h
        s_w = params.stride_w
    [p_h, p_w] = [0, 0]
    if isinstance(params.pad, numbers.Number):
        [p_h, p_w] = [params.pad] * 2
    elif len(params.pad) > 0:
        p_h = params.pad_h if params.pad_h > 0 else params.pad[0]
        p_w = params.pad_w if params.pad_w > 0 else params.pad[len(params.pad) -
                                                               1]
    elif params.pad_h > 0 or params.pad_w > 0:
        p_h = params.pad_h
        p_w = params.pad_w
    dila_h = dila_w = 1
    group = 1
    c_o = 1
    if kind in ["Convolution", "Deconvolution", "ConvolutionDepthwise"]:
        if kind in ["Convolution", "Deconvolution"]:
            c_o = params.num_output
        dila_len = len(params.dilation)
        if dila_len == 2:
            dila_h = params.dilation[0]
            dila_w = params.dilation[1]
        elif dila_len == 1:
            dila_h = dila_w = params.dilation[0]
        else:
            assert dila_len == 0, "invalid length[%s] of dilation in convolution" % (
                dila_len)
    if kind in ['Convolution', 'Deconvolution']:
        group = params.group
    kernel = [k_h, k_w]
    stride = [s_h, s_w]
    pad = [p_h, p_w]
    dilation = [dila_h, dila_w]
    return c_o, kernel, stride, pad, dilation, group


class CaffeOpMapper():
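    """Maps a decoded Caffe graph onto an equivalent PaddleGraph.

    Each supported Caffe layer type has a method of the same name below; ops
    listed in directly_map_ops are translated one-to-one without any extra
    attribute handling.
    """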
    directly_map_ops = {
        'Sigmoid': ['paddle.nn.layer.Sigmoid'],
        'TanH': ['paddle.nn.Tanh'],
    }

    def __init__(self, decoder):
        self.graph = decoder.caffe_graph
        if not self.op_checker():
            raise Exception("Model is not supported yet.")
        self.params = dict()
        self.paddle_graph = PaddleGraph(parent_layer=None, source_type="caffe")
        self.paddle_graph.outputs = self.graph.output_nodes
        self.inputs_info = {}
        self.nn_name2id = {}
        print("Total nodes: {}".format(
            sum([
                isinstance(node, CaffeGraphNode)
                for name, node in self.graph.node_map.items()
            ])))
        print("Nodes converting ...")
        for i, node_name in enumerate(self.graph.topo_sort):
            sys.stderr.write("\rConverting node {} ...     ".format(i + 1))
            node = self.graph.get_node(node_name)
            op = node.layer_type
            if hasattr(self, op):
                func = getattr(self, op)
                func(node)
            elif op in self.directly_map_ops:
                self.directly_map(node)
        print("\nNodes converted.")
        self.paddle_graph.set_name(self.graph.graph_name)
        self.paddle_graph.set_parameters(self.params)
        self.paddle_graph.set_inputs_info(self.inputs_info)

    def op_checker(self):
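        """Check that every layer type in the graph can be converted.

        Returns True when all ops are supported; otherwise prints the set of
        unsupported ops and returns False.
        """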
        unsupported_ops = set()
        for node_name in self.graph.topo_sort:
            node = self.graph.get_node(node_name)
            op = node.layer_type
            if not hasattr(self, op) and op not in self.directly_map_ops:
                unsupported_ops.add(op)
        if len(unsupported_ops) == 0:
            return True
        else:
            print("\n========= {} OPs are not supported yet ===========".
                  format(len(unsupported_ops)))
            for op in unsupported_ops:
                print("========== {} ============".format(op))
            return False

    def directly_map(self, node):
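        """Translate a single-input node whose Paddle equivalent (see
        directly_map_ops) needs no attribute handling."""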
        inputs = node.layer.input
        assert len(inputs) == 1, 'directly_map error with multi inputs'
        op_info = self.directly_map_ops[node.layer_type]
        input = self.graph.get_input_node(node, 0)
        paddle_op = op_info[0]
        if paddle_op.startswith("paddle.nn"):
            op_name = paddle_op[10:].lower()
            op_name = name_generator(op_name, self.nn_name2id)
            output_name = node.name
            layer_outputs = [op_name, output_name]
            self.paddle_graph.add_layer(
                kernel=paddle_op,
                inputs={"x": input.name},
                outputs=layer_outputs)
        else:
            self.paddle_graph.add_layer(
                kernel=paddle_op, inputs={"x": input.name},
                outputs=[node.name])

    def Input(self, node):
        self.paddle_graph.add_layer(
            "paddle.to_tensor",
            inputs={},
            outputs=[node.layer_name],
            data=node.name)
        shape = list(node.layer.input_param.shape[0].dim)[1:]
        self.inputs_info[node.name] = [[-1] + shape, "float32"]

    def MemoryData(self, node):
        params = node.layer.memory_data_param
        transform_params = node.layer.transform_param
        self.paddle_graph.add_layer(
            "paddle.to_tensor",
            inputs={},
            outputs=[node.layer_name],
            data=node.layer_name)
        shape = list()
        shape.append(params.batch_size)
        shape.append(params.channels)
        if hasattr(transform_params, "crop_size"):
            shape.append(transform_params.crop_size)
            shape.append(transform_params.crop_size)
        else:
            shape.append(params.width)
            shape.append(params.height)
        self.inputs_info[node.layer_name] = [shape, "float32"]

    def Convolution(self, node):
        conv2d_name = name_generator("conv", self.nn_name2id)
        output_name = node.layer_name
        layer_outputs = [conv2d_name, output_name]
        data = node.data
        params = node.layer.convolution_param
        out_channel, kernel, stride, pad, dilation, group = _get_kernel_parameters(
            node.layer_type, params)
        if data is None:
            data = []
            print(
                "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                .format(node.layer_name, node.layer_type))
            data.append(
                np.zeros([
                    out_channel, node.in_shapes[0][1], kernel[0], kernel[1]
                ]).astype('float32'))
            data.append(np.zeros([out_channel, ]).astype('float32'))
        else:
            data = _adjust_parameters(node)
        self.params[conv2d_name + ".weight"] = data[0]
        if len(data) == 2:
            self.params[conv2d_name + ".bias"] = data[1]
        assert len(node.inputs
                   ) == 1, "The count of Convolution node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_attrs = {
            "in_channels": node.in_shapes[0][1],
            "out_channels": out_channel,
            "kernel_size": kernel,
            "stride": stride,
            "padding": pad,
            "dilation": dilation,
            "groups": group
        }
        if len(data) == 1:
            layer_attrs["bias_attr"] = False
        self.paddle_graph.add_layer(
            "paddle.nn.Conv2D",
            inputs={"input": input.name},
            outputs=layer_outputs,
            **layer_attrs)

    def DepthwiseConvolution(self, node):
        node.layer_type = "ConvolutionDepthwise"
        self.ConvolutionDepthwise(node)

    def Deconvolution(self, node):
        conv2d_name = name_generator("conv", self.nn_name2id)
        output_name = node.layer_name
        layer_outputs = [conv2d_name, output_name]
        data = node.data
        params = node.layer.convolution_param
        out_channel, kernel, stride, pad, dilation, group = _get_kernel_parameters(
            node.layer_type, params)
        if data is None:
            data = []
            print(
                "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                .format(node.layer_name, node.layer_type))
            data.append(
                np.zeros([
                    out_channel, node.in_shapes[0][1], kernel[0], kernel[1]
                ]).astype('float32'))
            data.append(np.zeros([out_channel, ]).astype('float32'))
        else:
            data = _adjust_parameters(node)
        self.params[conv2d_name + ".weight"] = data[0]
        if len(data) == 2:
            self.params[conv2d_name + ".bias"] = data[1]
        assert len(node.inputs
                   ) == 1, "The count of Deconvolution node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_attrs = {
            "in_channels": node.in_shapes[0][1],
            "out_channels": out_channel,
            "kernel_size": kernel,
            "stride": stride,
            "padding": pad,
            "dilation": dilation,
            "groups": group
        }
        if len(data) == 1:
            layer_attrs["bias_attr"] = False
        self.paddle_graph.add_layer(
            "paddle.nn.Conv2DTranspose",
            inputs={"input": input.name},
            outputs=layer_outputs,
            **layer_attrs)

    def ConvolutionDepthwise(self, node):
        conv2d_name = name_generator("conv", self.nn_name2id)
        output_name = node.layer_name
        layer_outputs = [conv2d_name, output_name]
        data = node.data
        params = node.layer.convolution_param
        out_channel, kernel, stride, pad, dilation, group = _get_kernel_parameters(
            node.layer_type, params)
        out_channel = params.num_output if params.num_output is not None else node.in_shapes[
            0][1]
        in_channel = node.in_shapes[0][1]
        group = int(in_channel / (
            in_channel / out_channel)) if in_channel > out_channel else int(
                in_channel / (out_channel / in_channel))
        if data is None:
            data = []
            print(
                "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                .format(node.layer_name, node.layer_type))
            data.append(
                np.zeros([
                    out_channel, node.in_shapes[0][1], kernel[0], kernel[1]
                ]).astype('float32'))
            data.append(np.zeros([out_channel, ]).astype('float32'))
        else:
            data = _adjust_parameters(node)
        self.params[conv2d_name + ".weight"] = data[0]
        if len(data) == 2:
            self.params[conv2d_name + ".bias"] = data[1]
        assert len(
            node.inputs
        ) == 1, "The count of ConvolutionDepthwise node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_attrs = {
            "in_channels": in_channel,
            "out_channels": out_channel,
            "kernel_size": kernel,
            "stride": stride,
            "padding": pad,
            "dilation": dilation,
            "groups": group
        }
        if len(data) == 1:
            layer_attrs["bias_attr"] = False
        self.paddle_graph.add_layer(
            "paddle.nn.Conv2D",
            inputs={"input": input.name},
            outputs=layer_outputs,
            **layer_attrs)

    def Pooling(self, node):
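        """Map Caffe Pooling onto paddle.nn.*Pool2D.

        Global pooling becomes AdaptiveMaxPool2D/AdaptiveAvgPool2D with a 1x1
        output size; for prototxts that use round_mode instead of ceil_mode,
        round_mode == 0 (CEIL) is translated to ceil_mode=True.
        """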
        pool2d_name = name_generator("pool", self.nn_name2id)
        output_name = node.layer_name
        layer_outputs = [pool2d_name, output_name]
        params = node.layer.pooling_param
        ceil_mode = getattr(params, "ceil_mode", True)
        if not hasattr(params, 'ceil_mode'):
            ceil_mode = True if getattr(params, "round_mode", 0) == 0 else False
        global_pool = getattr(params, "global_pooling", False)
        kernel_default = [1, 1]
        channel, kernel, stride, pad, dilation, group = _get_kernel_parameters(
            node.layer_type, params)
        if params.pool == 0:
            pool_type = "max"
        else:
            pool_type = "avg"
        assert len(
            node.inputs) == 1, "The count of Pooling node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        if global_pool:
            if kernel[0] == 0:
                kernel = [1, 1]
            if params.pool == 0:
                self.paddle_graph.add_layer(
                    "paddle.nn.AdaptiveMaxPool2D",
                    inputs={"input": input.name},
                    outputs=layer_outputs,
                    output_size=kernel)
            else:
                self.paddle_graph.add_layer(
                    "paddle.nn.AdaptiveAvgPool2D",
                    inputs={"input": input.name},
                    outputs=layer_outputs,
                    output_size=kernel)
        else:
            layer_attrs = {
                'kernel_size': kernel,
                'stride': stride,
                'padding': pad,
                'ceil_mode': ceil_mode,
            }
            if params.pool == 0:
                self.paddle_graph.add_layer(
                    "paddle.nn.MaxPool2D",
                    inputs={"input": input.name},
                    outputs=layer_outputs,
                    **layer_attrs)
            else:
                self.paddle_graph.add_layer(
                    "paddle.nn.AvgPool2D",
                    inputs={"input": input.name},
                    outputs=layer_outputs,
                    **layer_attrs)

    def LRN(self, node):
        lrn_name = name_generator("lrn", self.nn_name2id)
        output_name = node.layer_name
        layer_outputs = [lrn_name, output_name]
        assert len(node.inputs) == 1, "The count of LRN node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.lrn_param
        assert params.local_size % 2 == 1
        alpha = params.alpha / float(params.local_size)
        layer_attrs = {
            "n": params.local_size,
            "k": params.k,
            "alpha": alpha,
            "beta": params.beta,
        }
        self.paddle_graph.add_layer(
            "paddle.fluid.layers.lrn",
            inputs={"input": input.name},
            outputs=[node.layer_name],
            **layer_attrs)

    def InnerProduct(self, node):
        linear_name = name_generator("linear", self.nn_name2id)
        output_name = node.layer_name
        layer_outputs = [linear_name, output_name]
        data = node.data
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.inner_product_param
        if data is None:
            print(
                "The parameter of {} (type is {}) is not set. So we set the parameters as 0."
                .format(node.layer_name, node.layer_type))
            data = []
            data.append(
                np.zeros([node.in_shapes[0][1], params.num_output]).astype(
                    "float32"))
            data.append(np.zeros([params.num_output]).astype("float32"))
        else:
            data = _adjust_parameters(node)
            # Reshape the parameters to Paddle's ordering
            transpose_order = (1, 0)
            w = data[0]
            fc_shape = w.shape
            output_channels = fc_shape[0]
            w = w.reshape((output_channels, -1))
            w = w.transpose(transpose_order)
            data[0] = w

        self.params[linear_name + ".weight"] = data[0]
        if len(data) == 2:
            self.params[linear_name + ".bias"] = data[1]
        assert len(node.inputs
                   ) == 1, "The count of InnerProduct node\'s input is not 1."
        assert params.axis == 1
        assert params.bias_term == True
        layer_attrs = {
            "in_features": data[0].shape[0],
            "out_features": params.num_output
        }
        if len(data) == 1:
            layer_attrs["bias"] = False
        if node.in_shapes[0][-1] != data[0].shape[0]:
            self.paddle_graph.add_layer(
                "paddle.reshape",
                inputs={"x": input.name},
                outputs=[output_name],
                shape=[-1, data[0].shape[0]])
            self.paddle_graph.add_layer(
                "paddle.nn.Linear",
                inputs={"input": output_name},
                outputs=layer_outputs,
                **layer_attrs)
        else:
            self.paddle_graph.add_layer(
                "paddle.nn.Linear",
                inputs={"input": input.name},
                outputs=layer_outputs,
                **layer_attrs)

    def AbsVal(self, node):
        assert len(
            node.inputs
        ) >= 1, "The count of AbsVal node\'s input is less than 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            "paddle.abs",
            inputs={"input": input.name},
            outputs=[node.layer_name])

    def Softmax(self, node):
        softmax_name = name_generator("softmax", self.nn_name2id)
        output_name = node.layer_name
        layer_outputs = [softmax_name, output_name]
        assert len(
            node.inputs) == 1, "The count of Softmax node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.softmax_param
        axis = params.axis
        shape = node.in_shapes[0]
        dims = len(shape)
        axis = axis + dims if axis < 0 else axis
        layer_attrs = {'axis': axis}
        self.paddle_graph.add_layer(
            "paddle.nn.Softmax",
            inputs={"input": input.name},
            outputs=layer_outputs,
            **layer_attrs)

    def Slice(self, node):
        assert len(
            node.inputs) == 1, "The count of Slice node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        top_len = len(node.layer.top)
        params = node.layer.slice_param
        axis = params.axis
        slice_dim = params.slice_dim
        if slice_dim != 1 and axis == 1:
            axis = slice_dim
        output_shape = node.out_shapes
        sections_list = list()
        outputs_list = list()
        for i, s in enumerate(output_shape):
            sections_list.append(s[axis])
            outputs_list.append("{}_p{}".format(node.layer_name, i))
        layer_attrs = {
            'num_or_sections': sections_list,
            'axis': axis,
        }
        self.paddle_graph.add_layer(
            "paddle.split",
            inputs={"x": input.name},
            outputs=outputs_list,
            **layer_attrs)

    def Concat(self, node):
        assert len(
            node.inputs
        ) >= 1, "The count of Concat node\'s input is less than 1."
        inputs_list = list()
        for i in range(len(node.inputs)):
            input = self.graph.get_input_node(node, idx=i, copy=True)
            inputs_list.append(input.name)
        params = node.layer.concat_param
        axis = params.axis
        layer_attrs = {'axis': axis}
        self.paddle_graph.add_layer(
            "paddle.concat",
            inputs={"x": inputs_list},
            outputs=[node.layer_name],
            **layer_attrs)

    def ReLU(self, node):
        relu_name = name_generator("relu", self.nn_name2id)
        output_name = node.layer_name
        layer_outputs = [relu_name, output_name]
        assert len(
            node.inputs) == 1, "The count of ReLU node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.relu_param
        if params.HasField('negative_slope') and params.negative_slope != 0:
            negative_slope = float(params.negative_slope)
            layer_attrs = {'negative_slope': negative_slope}
            self.paddle_graph.add_layer(
                "paddle.nn.LeakyReLU",
                inputs={"input": input.name},
                outputs=layer_outputs,
                **layer_attrs)
        else:
            self.paddle_graph.add_layer(
                "paddle.nn.ReLU",
                inputs={"input": input.name},
                outputs=layer_outputs)

    def PReLU(self, node):
        prelu_name = name_generator("prelu", self.nn_name2id)
        output_name = node.layer_name
        layer_outputs = [prelu_name, output_name]
        assert len(
            node.inputs) == 1, "The count of PReLU node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.prelu_param
        mode_bool = params.channel_shared
        output_shape = node.out_shapes[0]
        if mode_bool:
            num_parameters = 1
        else:
            num_parameters = output_shape[1]
        data = node.data
        assert data is not None, "The parameter of {} (type is {}) is not set. You need to use python package of caffe to set the default value.".format(
            node.layer_name, node.layer_type)
        self.params[prelu_name + '._weight'] = np.squeeze(data[0])
        self.paddle_graph.add_layer(
            "paddle.nn.PReLU",
            inputs={"input": input.name},
            outputs=layer_outputs,
            num_parameters=num_parameters)

    def Eltwise(self, node):
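        """Map Caffe Eltwise: operation 0 (PROD) -> paddle.multiply, 1 (SUM,
        honoring a two-element coeff via paddle.scale) -> paddle.add, and
        anything else (MAX) -> paddle.max."""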
        assert len(
            node.inputs) == 2, "The count of Eltwise node\'s input is not 2."
        params = node.layer.eltwise_param
        mode = params.operation
        inputs = []
        input0 = self.graph.get_input_node(node, idx=0, copy=True)
        input1 = self.graph.get_input_node(node, idx=1, copy=True)
        input0_name = input0.name
        input1_name = input1.name
        if mode == 0:
            inputs_dict = {}
            inputs_dict['x'] = input0_name
            inputs_dict['y'] = input1_name
            self.paddle_graph.add_layer(
                "paddle.multiply",
                inputs=inputs_dict,
                outputs=[node.layer_name])
        elif mode == 1:
            if hasattr(params, 'coeff') and len(params.coeff) == 2:
                coeff = params.coeff
                self.paddle_graph.add_layer(
                    "paddle.scale",
                    inputs={"x": input0_name},
                    outputs=[node.layer_name + '_mul0'],
                    scale=coeff[0])
                self.paddle_graph.add_layer(
                    "paddle.scale",
                    inputs={"x": input1_name},
                    outputs=[node.layer_name + '_mul1'],
                    scale=coeff[1])
                inputs_dict = {}
                inputs_dict['x'] = node.layer_name + '_mul0'
                inputs_dict['y'] = node.layer_name + '_mul1'
                self.paddle_graph.add_layer(
                    "paddle.add", inputs=inputs_dict,
                    outputs=[node.layer_name])
            else:
                inputs_dict = {}
                inputs_dict['x'] = input0_name
                inputs_dict['y'] = input1_name
                self.paddle_graph.add_layer(
                    "paddle.add", inputs=inputs_dict,
                    outputs=[node.layer_name])
        else:
            inputs_dict = {}
            inputs_dict['x'] = input0_name
            inputs_dict['y'] = input1_name
            self.paddle_graph.add_layer(
                "paddle.max", inputs=inputs_dict, outputs=[node.layer_name])

    def BatchNorm(self, node):
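        """Map Caffe BatchNorm onto paddle.nn.BatchNorm2D.

        Caffe stores (mean, variance, scale_factor); both statistics are
        divided by scale_factor here. The affine transform is disabled
        (weight_attr=False, bias_attr=False) because Caffe models pair
        BatchNorm with a separate Scale layer.
        """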
        batchnorm_name = name_generator("batchnorm", self.nn_name2id)
        output_name = node.layer_name
        layer_outputs = [batchnorm_name, output_name]
        assert len(
            node.inputs) == 1, "The count of BatchNorm node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.batch_norm_param
        if hasattr(params, "eps"):
            eps = params.eps
        else:
            eps = 1e-5
        if node.data is None or len(node.data) != 3:
            print(
                "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                .format(node.layer_name, node.layer_type))
            mean = np.zeros([node.in_shapes[0][1], ]).astype("float32")
            variance = np.zeros([node.in_shapes[0][1], ]).astype("float32")
            scale = 0
        else:
            node.data = [np.squeeze(i).astype("float32") for i in node.data]
            mean, variance, scale = node.data
        # Prescale the stats
        scaling_factor = 1.0 / scale if scale != 0 else 0
        mean *= scaling_factor
        variance *= scaling_factor
        self.params[batchnorm_name + "._mean"] = mean
        self.params[batchnorm_name + '._variance'] = variance
        layer_attrs = {
            "num_features": node.in_shapes[0][1],
            "epsilon": eps,
            "weight_attr": False,
            "bias_attr": False,
        }
        if len(node.in_shapes[0]) == 2:
            self.paddle_graph.add_layer(
                "paddle.unsqueeze",
                inputs={"x": input.name},
                outputs=[input.name],
                axis=[2, 3])
        self.paddle_graph.add_layer(
            "paddle.nn.BatchNorm2D",
            inputs={"input": input.name},
            outputs=layer_outputs,
            **layer_attrs)
        if len(node.in_shapes[0]) == 2:
            self.paddle_graph.add_layer(
                "paddle.squeeze",
                inputs={"x": node.layer_name},
                outputs=[node.layer_name],
                axis=[2, 3])

    def Scale(self, node):
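        """Map Caffe Scale as an elementwise multiply by a learned per-channel
        weight followed by an add of a bias parameter (zeros when bias_term is
        unset); when the layer has two inputs, the second input supplies the
        multiplier instead."""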
        if node.data is None:
            print(
                "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                .format(node.layer_name, node.layer_type))
            self.params[node.layer_name + "_cparam1"] = np.zeros([
                node.in_shapes[0][1],
            ]).astype("float32")
            self.params[node.layer_name + "_cparam2"] = np.zeros([
                node.in_shapes[0][1],
            ]).astype("float32")
        else:
            self.params[node.layer_name + "_cparam1"] = np.squeeze(node.data[
                0]).astype("float32")
            if not node.layer.scale_param.bias_term:
                self.params[node.layer_name + "_cparam2"] = np.zeros([
                    node.in_shapes[0][1],
                ]).astype("float32")
            else:
                self.params[node.layer_name + "_cparam2"] = np.squeeze(
                    node.data[1]).astype("float32")
        params = node.layer.scale_param
        axis = params.axis
        inputs = []
        if len(node.inputs) == 2:
            input0 = self.graph.get_input_node(node, idx=0, copy=True)
            input1 = self.graph.get_input_node(node, idx=1, copy=True)
            input0_name = input0.name
            input1_name = input1.name
            inputs_dict = {}
            inputs_dict['x'] = input0_name
            inputs_dict['y'] = input1_name
            self.paddle_graph.add_layer(
                "paddle.multiply",
                inputs=inputs_dict,
                outputs=[node.layer_name + "_mul"],
                axis=1)
        else:
            self.paddle_graph.add_layer(
                "self.create_parameter",
                inputs={},
                outputs=[node.layer_name + "_cparam1"],
                shape=self.params[node.layer_name + "_cparam1"].shape,
                attr=string(node.layer_name + "_cparam1"))
            input0 = self.graph.get_input_node(node, idx=0, copy=True)
            input0_name = input0.name
            inputs_dict = {}
            inputs_dict['x'] = input0_name
            inputs_dict['y'] = node.layer_name + "_cparam1"
            if len(node.in_shapes[0]) == 2:
                self.paddle_graph.add_layer(
                    "paddle.multiply",
                    inputs=inputs_dict,
                    outputs=[node.layer_name + "_mul"])
            else:
                self.paddle_graph.add_layer(
                    "paddle.multiply",
                    inputs=inputs_dict,
                    outputs=[node.layer_name + "_mul"],
                    axis=axis)
        self.paddle_graph.add_layer(
            "self.create_parameter",
            inputs={},
            outputs=[node.layer_name + "_cparam2"],
            shape=self.params[node.layer_name + "_cparam2"].shape,
            attr=string(node.layer_name + "_cparam2"))
        inputs_dict = {}
        inputs_dict['x'] = node.layer_name + "_mul"
        inputs_dict['y'] = node.layer_name + "_cparam2"
        output_shape = node.out_shapes[0]
        if axis == -1:
            self.paddle_graph.add_layer(
                "paddle.add", inputs=inputs_dict, outputs=[node.layer_name])
        else:
            if axis < 0:
                axis = axis + len(output_shape)
            param2_shape = self.params[node.layer_name + "_cparam2"].shape
            param2_shape_len = len(param2_shape)
            diff_len = len(output_shape) - axis - param2_shape_len
            new_shape = list(param2_shape) + [1] * diff_len
            self.paddle_graph.add_layer(
                "paddle.reshape",
                inputs={"x": node.layer_name + "_cparam2"},
                outputs=[node.layer_name + "_cparam2"],
                shape=new_shape)
            self.paddle_graph.add_layer(
                "paddle.add", inputs=inputs_dict, outputs=[node.layer_name])

    def Reshape(self, node):
        input = self.graph.get_input_node(node, idx=0, copy=True)
        output_shape = node.out_shapes[0]
        self.paddle_graph.add_layer(
            "paddle.reshape",
            inputs={"x": input.name},
            outputs=[node.layer_name],
            shape=output_shape)

    def ArgMax(self, node):
        assert len(node.inputs) == 1 and len(
            node.outputs
        ) == 1, "The count of ArgMax node\'s input and output is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        input_shape = node.in_shapes[0]
        params = node.layer.argmax_param
        out_max_val = params.out_max_val if hasattr(params,
                                                    "out_max_val") else False
        top_k = params.top_k if hasattr(params, "top_k") else 1
        axis = params.axis if hasattr(params, "axis") else -1
        if axis < 0:
            axis += len(input_shape)
        if out_max_val is True:
            self.paddle_graph.add_layer(
                "paddle.topk",
                inputs={"x": input.name},
                outputs=[
                    node.layer_name + "_topk_var",
                    node.layer_name + "_index_var"
                ],
                k=top_k)
            self.paddle_graph.add_layer(
                "paddle.cast",
                inputs={"x": node.layer_name + "_index_var"},
                outputs=[node.layer_name + "_index_var"],
                dtype="{}_topk_var.dtype".format(node.layer_name))
            self.paddle_graph.add_layer(
                "paddle.concat",
                inputs={
                    "x": [
                        node.layer_name + "_topk_var",
                        node.layer_name + "_index_var"
                    ]
                },
                outputs=[node.layer_name],
                axis=axis)
        else:
            self.paddle_graph.add_layer(
                "paddle.topk",
                inputs={"x": input.name},
                outputs=["_", node.layer_name],
                k=top_k)

    def Axpy(self, node):
        assert len(
            node.inputs) == 3, "The count of Axpy node\'s input is not 3."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.axpy_param
        input0 = self.graph.get_input_node(node, idx=0, copy=True)
        input1 = self.graph.get_input_node(node, idx=1, copy=True)
        input2 = self.graph.get_input_node(node, idx=2, copy=True)
        input0_name = input0.name
        input1_name = input1.name
        input2_name = input2.name
        inputs_dict = {}
        inputs_dict['x'] = input1_name
        inputs_dict['y'] = input0_name
        self.paddle_graph.add_layer(
            "paddle.multiply",
            inputs=inputs_dict,
            outputs=[node.layer_name + "_mul"],
            axis=0)
        inputs_dict = {}
        inputs_dict['x'] = node.layer_name + "_mul"
        inputs_dict['y'] = input2_name
        self.paddle_graph.add_layer(
            "paddle.add",
            inputs=inputs_dict,
            outputs=[node.layer_name])

    def Crop(self, node):
        assert len(
            node.inputs) == 2, "The count of Crop node\'s input is not 2."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        example = self.graph.get_input_node(node, idx=1, copy=True)
        params = node.layer.crop_param
        axis = params.axis
        input_shape = node.in_shapes[0]
        if axis < 0:
            axis += len(input_shape)
        offset_real = [0] * len(input_shape)
        if hasattr(params, "offset") and len(params.offset) > 0:
            offset = list(params.offset)
            assert (len(input_shape) - axis
                    ) == len(offset), "invalid offset[%s] in crop layer" % (
                        str(offset))
            offset_real = [0] * axis + offset
        self.paddle_graph.add_layer(
            "paddle.crop",
            inputs={"x": input.name},
            outputs=[node.layer_name],
            shape=node.in_shapes[1],
            offsets=list(offset_real))

    def Flatten(self, node):
        assert len(
            node.inputs) == 1, "The count of Flatten node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            "paddle.reshape",
            inputs={"x": input.name},
            outputs=[node.layer_name],
            shape=node.out_shapes[0])

    def Power(self, node):
        assert len(
            node.inputs) == 1, "The count of Power node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.power_param
        layer_attrs = {
            'scale': params.scale,
            'bias': params.shift,
            'bias_after_scale': True
        }
        self.paddle_graph.add_layer(
            "paddle.scale",
            inputs={"x": input.name},
            outputs=[node.layer_name],
            **layer_attrs)
        self.paddle_graph.add_layer(
            "paddle.pow",
            inputs={"x": node.layer_name},
            outputs=[node.layer_name],
            exponent=params.power)

    def Reduction(self, node):
        assert len(
            node.inputs) == 1, "The count of Reduction node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.reduction_param
        operation = params.operation
        axis = params.axis
        coeff = params.coeff
        assert operation >= 1 and operation <= 4, "invalid reduction operation [%s]" % (
            operation)
        input_len = len(node.in_shapes[0])
        if axis < 0:
            axis += input_len + 1
        dim = list(range(input_len))
        # operation = SUM
        if operation == 1:
            layer_attrs = {
                "dim": dim[axis:],
                "keep_dim": False,
            }
            self.paddle_graph.add_layer(
                "paddle.sum",
                inputs={"input": input.name},
                outputs=[node.layer_name],
                **layer_attrs)
        # operation = ASUM
        elif operation == 2:
            self.paddle_graph.add_layer(
                "paddle.abs",
                inputs={"x": input.name},
                outputs=[node.layer_name])
            layer_attrs = {
                "dim": dim[axis:],
                "keep_dim": False,
            }
            self.paddle_graph.add_layer(
                "paddle.sum",
                inputs={"input": node.layer_name},
                outputs=[node.layer_name],
                **layer_attrs)
        # operation = SUMSQ
        elif operation == 3:
            self.paddle_graph.add_layer(
                "paddle.pow",
                inputs={"x": input.name},
                outputs=[node.layer_name],
                exponent=2.0)
            layer_attrs = {
                "dim": dim[axis:],
                "keep_dim": False,
            }
            self.paddle_graph.add_layer(
                "paddle.sum",
                inputs={"input": node.layer_name},
                outputs=[node.layer_name],
                **layer_attrs)
        # operation = MEAN
        else:
            layer_attrs = {
                "axis": dim[axis:],
                "keepdim": False,
            }
            self.paddle_graph.add_layer(
                "paddle.mean",
                inputs={"x": input.name},
                outputs=[node.layer_name],
                **layer_attrs)
        self.paddle_graph.add_layer(
            "paddle.scale",
            inputs={"x": node.layer_name},
            outputs=[node.layer_name],
            scale=coeff)

    def DetectionOutput(self, node):
        detection_output_name = name_generator("detection_output",
                                               self.nn_name2id)
        output_name = node.layer_name
        layer_outputs = [detection_output_name, output_name]
        assert len(
            node.inputs
        ) == 3, "The count of DetectionOutput node\'s input is not 3."
        inputs_dict = dict()
        for i in range(len(node.inputs)):
            input = self.graph.get_input_node(node, idx=i, copy=True)
            if i == 1:
                input = self.graph.get_input_node(node, idx=i, copy=True)
                while input is not None \
                      and input.layer_type != 'Softmax' \
                      and input.layer_type != 'Sigmoid':
                    input = self.graph.get_input_node(input, idx=0, copy=True)
                assert input is not None, 'This kind of DetectionOutput is not supported!'
                input = self.graph.get_input_node(input, idx=0, copy=True)
            inputs_dict["x{}".format(i)] = input.name
        params = node.layer.detection_output_param
        nms_param = params.nms_param
        nms_param_dict = dict()
        if nms_param is not None:
            nms_param_dict["nms_threshold"] = nms_param.nms_threshold
            nms_param_dict["top_k"] = nms_param.top_k
            nms_param_dict["eta"] = nms_param.eta
        default = {"nms_threshold": 0.3, "top_k": 10, "eta": 1.0}
        for f in default.keys():
            if f not in nms_param_dict:
                nms_param_dict[f] = default[f]
        layer_attrs = {
            "background_label": params.background_label_id,
            "nms_threshold": nms_param_dict["nms_threshold"],
            "nms_top_k": nms_param_dict["top_k"],
            "keep_top_k": params.keep_top_k,
            "score_threshold": params.confidence_threshold,
            "nms_eta": nms_param_dict["eta"]
        }
        self.paddle_graph.add_layer(
            kernel="custom_layer:DetectionOutput",
            inputs=inputs_dict,
            outputs=layer_outputs,
            **layer_attrs)

    def Normalize(self, node):
        normalize_name = name_generator("normalize", self.nn_name2id)
        output_name = node.layer_name
        layer_outputs = [normalize_name, output_name]
        assert len(
            node.inputs) == 1, "The count of Normalize node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.norm_param
        param_name = node.layer_name + "_scale"
        if node.data is None or len(node.data) != 1:
            print(
                "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                .format(node.layer_name, node.layer_type))
            self.params[param_name] = \
                np.zeros([1] if params.channel_shared else [node.in_shapes[0][1]]).astype("float32")
        else:
            self.params[param_name] = _adjust_parameters(node)[0]

        self.paddle_graph.add_layer(
            "self.create_parameter",
            inputs={},
            outputs=[param_name],
            shape=self.params[param_name].shape,
            attr=string(param_name))
        inputs_dict = {}
        layer_attrs = {"axis": -1 if params.channel_shared else 1}
        self.paddle_graph.add_layer(
            "custom_layer:Normalize",
            inputs={"x": input.name,
                    "param": param_name},
            outputs=layer_outputs,
            **layer_attrs)

    def Permute(self, node):
        assert len(
            node.inputs) == 1, "The count of Permute node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.permute_param
        order = list(params.order)
        self.paddle_graph.add_layer(
            "paddle.transpose",
            inputs={"x": input.name},
            outputs=[node.layer_name],
            perm=order)

    def PriorBox(self, node):
        priorbox_name = name_generator("priorbox", self.nn_name2id)
        output_name = node.layer_name
        layer_outputs = [priorbox_name, output_name]
        assert len(
            node.inputs) == 2, "The count of PriorBox node\'s input is not 2."
        input0 = self.graph.get_input_node(node, idx=0, copy=True)
        input1 = self.graph.get_input_node(node, idx=1, copy=True)
        inputs_dict = {}
        inputs_dict["x0"] = input0.name
        inputs_dict["x1"] = input1.name
        params = node.layer.prior_box_param
        steps = tuple(params.step) if type(params.step) \
                is list or type(params.step) is tuple \
                else (params.step, params.step)
        layer_attrs = {
            "min_sizes": params.min_size,
            "max_sizes": params.max_size,
            "aspect_ratios": params.aspect_ratio,
            "variance": params.variance,
            "flip": params.flip,
            "clip": params.clip,
            "steps": steps,
            "offset": params.offset,
            "min_max_aspect_ratios_order": True
        }
        self.paddle_graph.add_layer(
            "custom_layer:PriorBox",
            inputs=inputs_dict,
            outputs=layer_outputs,
            **layer_attrs)

    def ReLU6(self, node):
        relu6_name = name_generator("relu6", self.nn_name2id)
        output_name = node.layer_name
        layer_outputs = [relu6_name, output_name]
        assert len(
            node.inputs) == 1, "The count of ReLU6 node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            "paddle.nn.ReLU6",
            inputs={"input": input.name},
            outputs=layer_outputs)

    def ROIPooling(self, node):
        roipooling_name = name_generator("roipooling", self.nn_name2id)
        output_name = node.layer_name
        layer_outputs = [roipooling_name, output_name]
        assert len(
            node.inputs) == 2, "The count of ROIPooling node\'s input is not 2."
        input0 = self.graph.get_input_node(node, idx=0, copy=True)
        input1 = self.graph.get_input_node(node, idx=1, copy=True)
        inputs_dict = {}
        inputs_dict["x0"] = input0.name
        inputs_dict["x1"] = input1.name
        params = node.layer.roi_pooling_param
        layer_attrs = {
            "pooled_height": params.pooled_h,
            "pooled_width": params.pooled_w,
            "spatial_scale": params.spatial_scale
        }
        self.paddle_graph.add_layer(
            "custom_layer:ROIPooling",
            inputs=inputs_dict,
            outputs=layer_outputs,
            **layer_attrs)

    def ShuffleChannel(self, node):
        assert len(node.inputs
                   ) == 1, "The count of ShuffleChannel node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.shuffle_channel_param
        self.paddle_graph.add_layer(
            "paddle.fluid.layers.shuffle_channel",
            inputs={"x": input.name},
            outputs=[node.layer_name],
            group=params.group)

    def Upsample(self, node):
        assert len(
            node.inputs) == 1, "The count of Upsample node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.upsample_param
        layer_attrs = {
            "align_corners": False,
            "scale_factor": params.scale,
            "mode": "nearest"
        }
        self.paddle_graph.add_layer(
            "paddle.nn.functional.interpolate",
            inputs={"x": input.name},
            outputs=[node.layer_name],
            **layer_attrs)

    def Select(self, node):
        select_name = name_generator("select", self.nn_name2id)
        output_name = node.layer_name
        layer_outputs = [select_name, output_name]
        assert len(
            node.inputs) == 1, "The count of Select node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        input_shape = node.in_shapes[0]
        params = node.layer.select_param
        layer_attrs = {
            "input_shape": input_shape,
            "point": params.slice_point,
            "axis": params.axis
        }
        self.paddle_graph.add_layer(
            "custom_layer:Select",
            inputs={"x": input.name},
            outputs=layer_outputs,
            **layer_attrs)
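

# A minimal usage sketch (not part of the mapper itself). CaffeDecoder's exact
# signature and PaddleGraph.gen_model are assumptions here, shown only to
# illustrate how CaffeOpMapper is typically driven:
#
#     from x2paddle.decoder.caffe_decoder import CaffeDecoder
#
#     decoder = CaffeDecoder("deploy.prototxt", "weights.caffemodel")
#     mapper = CaffeOpMapper(decoder)
#     mapper.paddle_graph.gen_model("./inference_model")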