# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from x2paddle.decoder.onnx_decoder import ONNXGraph, ONNXGraphNode, ONNXGraphDataNode
from x2paddle.core.graph import GraphNode
from x2paddle.core.fluid_code import Layer
from x2paddle.core.fluid_code import FluidCode
from x2paddle.core.util import string
from x2paddle.op_mapper.onnx2paddle.opset9.custom_layer import *
from functools import reduce
import numpy as np
import onnx
import onnx.numpy_helper as numpy_helper
from onnx.mapping import TENSOR_TYPE_TO_NP_TYPE
import logging as _logging
from collections import OrderedDict
import math
import os
import shutil

_logger = _logging.getLogger(__name__)


def _const_weight_or_none(node, necessary=False):
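    """Return the constant value behind `node`: the value of a Constant
    operator, the weight of an initializer, or None for runtime tensors."""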
    if 'Constant' in node.layer_type:
        return node.value
    if isinstance(node, ONNXGraphDataNode):
        return node.weight
    if necessary:
        raise AssertionError('{} should be an initializer or Constant operator.'.
                             format(node.layer_name))
    return None


def _get_same_padding(in_size, kernel_size, stride):
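    """Compute the [pad_before, pad_after] pair for one spatial dimension so
    that the output size is ceil(in_size / stride), i.e. ONNX SAME_UPPER
    auto-padding."""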
    new_size = int(math.ceil(in_size * 1.0 / stride))
    pad_size = (new_size - 1) * stride + kernel_size - in_size
    pad0 = int(pad_size / 2)
    pad1 = pad_size - pad0
    return [pad0, pad1]


def print_mapping_info(func):
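    """Decorator that prints the failing node's name and op type before
    re-raising any exception thrown during conversion."""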
    def run_mapping(*args, **kwargs):
        node = args[1]
        try:
            res = func(*args, **kwargs)
        except:
            print("convert failed node:{}, op_type is {}".format(
                node.layer_name[9:], node.layer_type))
            raise
        else:
            #print("convert successfully node:{}, op_type is {}".format(
            #    node.layer_name[9:], node.layer_type))
            return res

    return run_mapping


class OpSet9():
    elementwise_ops = {
        'Add': 'elementwise_add',
        'Div': 'elementwise_div',
        'Sub': 'elementwise_sub',
        'Mul': 'elementwise_mul',
        'Pow': 'elementwise_pow',
    }

    default_op_mapping_field_values = OrderedDict()
    default_op_mapping_field_values['FLUID_OP'] = ''
    default_op_mapping_field_values['FLUID_INPUT_ARGS'] = None
    default_op_mapping_field_values['FLUID_OUTPUT_ARGS'] = None
    default_op_mapping_field_values['ATTR_MAPPING'] = dict()
    default_op_mapping_field_values['DEFAULTS'] = dict()
    default_op_mapping_field_values['INPUT_PERM'] = None
    default_op_mapping_field_values['OUTPUT_PERM'] = None
    default_op_mapping_field_values['FILL_NAME_FIELD'] = True

    default_op_mapping = {
        'Shape': ['shape', ['X'], ['Out']],
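        # the uint8 byte patterns below reinterpret as the lowest/highest
        # finite float32 values, i.e. the default Clip bounds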
        'Clip': [
            'clip', ['X'], ['Out'], dict(), dict(
                min=(np.asarray(
                    [255, 255, 127, 255], dtype=np.uint8).view(np.float32)[0]),
                max=(np.asarray(
                    [255, 255, 127, 127], dtype=np.uint8).view(np.float32)[0]),
            )
        ],
        'Erf': ['erf', ['X'], ['Out']],
        'Ceil': ['ceil', ['X'], ['Out']],
        'ReduceMean': [
            'reduce_mean', ['X'], ['Out'], dict(
                axes='dim', keepdims='keep_dim'), dict(keep_dim=1)
        ],
        'ReduceSum': [
            'reduce_sum', ['X'], ['Out'], dict(
                axes='dim', keepdims='keep_dim'), dict(keep_dim=1)
        ],
        'ReduceMin': [
            'reduce_min', ['X'], ['Out'], dict(
                axes='dim', keepdims='keep_dim'), dict(keep_dim=1)
        ],
        'ReduceMax': [
            'reduce_max', ['X'], ['Out'], dict(
                axes='dim', keepdims='keep_dim'), dict(keep_dim=1)
        ],
        # activation functions
        'Relu': ['relu', ['X'], ['Out']],
        'LeakyRelu': ['leaky_relu', ['X'], ['Out'], dict(), dict(alpha=.01)],
        'Elu': ['elu', ['X'], ['Out'], dict(), dict(alpha=1.)],
        'ThresholdedRelu': [
            'thresholded_relu', ['X'], ['Out'], dict(alpha='threshold'),
            dict(alpha=1.)
        ],
        'Tanh': ['tanh', ['X'], ['Out']],
        'Sigmoid': ['sigmoid', ['X'], ['Out']],
        'HardSigmoid': [
            'hard_sigmoid', ['X'], ['Out'], dict(
                alpha='slope', beta='offset'), dict(
                    slope=.2, offset=.5)
        ],
        'Softsign': ['softsign', ['X'], ['Out']],
        'Softplus': ['softplus', ['X'], ['Out']],
        'Exp': ['exp', ['X'], ['Out']],
        'Softmax': ['softmax', ['X'], ['Out'], dict(), dict(axis=1)],
        'Sqrt': ['sqrt', ['X'], ['Out']],
        'Floor': ['floor', ['X'], ['Out']],
        'Abs': ['abs', ['X'], ['Out']],
    }

    default_ioa_constraint = {}

    def __init__(self, decoder):
        super(OpSet9, self).__init__()
        self.graph = decoder.graph
        self.input_shapes = []
        self.weights = dict()
        self.omit_nodes = list()
        self.used_custom_layers = dict()

    @print_mapping_info
    def directly_map(self, node, name='', *args, **kwargs):
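        """Map an ONNX op onto a single fluid layer, driven by its entry in
        `default_op_mapping`: fluid op name, input/output argument names,
        attribute renames and default attribute values."""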
        inputs = node.layer.input
        outputs = node.layer.output
        op_type = node.layer_type
        attrs = node.attr_map
        info = self.default_op_mapping[op_type]
        info.extend(
            list(self.default_op_mapping_field_values.values())[len(info):])
        (
            fluid_op,
            fluid_input_args,
            fluid_output_args,
            attr_mapping,
            default_attrs,
            input_perm,
            output_perm,
            fill_name_field, ) = info

        if fluid_op in self.default_ioa_constraint:
            for predicate, message in self.default_ioa_constraint[fluid_op]:
                assert predicate(inputs, outputs, attrs), message

        mapped_attrs = {
            attr_mapping.get(key, key): value
            for key, value in attrs.items()
        }
        if '' in mapped_attrs:
            mapped_attrs.pop('')
        if '_' in mapped_attrs:
            mapped_attrs.pop('_')
        fluid_attrs = default_attrs.copy()
        fluid_attrs.update(mapped_attrs)
        inputs = inputs if input_perm is None else list(
            map(lambda i: inputs[i], input_perm))
        val_inps = []
        for idx, ipt in enumerate(inputs):
            val_inps.append(self.graph.get_input_node(node, idx=idx, copy=True))

        val_outs = outputs if output_perm is None else list(
            map(lambda i: outputs[i], output_perm))
        attr = fluid_attrs
        assert len(val_inps) == 1, 'directly_map error with multiple inputs'
        if fluid_op not in ['shape', 'erf']:
            attr['name'] = string(node.layer_name)
        node.fluid_code.add_layer(
            fluid_op, inputs=val_inps[0], output=val_outs[0], param_attr=attr)
        if fluid_op in ['shape']:
            node.fluid_code.add_layer(
                'cast',
                inputs=val_outs[0],
                output=val_outs[0],
                param_attr={'dtype': string('int64')})

    @print_mapping_info
    def deal_custom_layer(self, node):
        op = node.layer_type
        custom_code, func = make_custom_layer(node)
        child_func_code, child_func = make_custom_child_func(node)
        params = get_params(node.layer, node.layer_type)
        arg_names, kwargs = set_args(func, params)
        kwargs['name'] = string(node.layer_name)
        node.fluid_code.add_layer(
            func.__code__.co_name,
            inputs=node.inputs,
            output=node,
            param_attr=kwargs,
            is_custom_layer=True)
        if op not in self.used_custom_layers:
            self.used_custom_layers[op] = custom_code
            if op + '_child_func' not in self.used_custom_layers:
                if child_func_code is not None:
                    self.used_custom_layers[op +
                                            '_child_func'] = child_func_code

    @print_mapping_info
    def elementwise_map(self, node):
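        """Convert a binary elementwise op. When the operands' ranks differ,
        the lower-rank operand is reshaped to drop leading 1-dims so that
        fluid broadcasting matches ONNX semantics."""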
        assert node.layer_type in self.elementwise_ops
        op_type = self.elementwise_ops[node.layer_type]

        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        val_y_shape = val_y.out_shapes[0]
        val_x_shape = val_x.out_shapes[0]

        if len(val_x_shape) < len(val_y_shape):
            val_x, val_y = val_y, val_x
            val_y_shape, val_x_shape = val_x_shape, val_y_shape

        str_y_shape = ','.join(str(e) for e in val_y_shape)
        str_x_shape = ','.join(str(e) for e in val_x_shape)
        slice_idx = 0
        if str_y_shape not in str_x_shape:
            for dim in val_y_shape:
                if dim == 1:
                    slice_idx += 1
                else:
                    break
        attr = {"name": string(node.layer_name)}
        if slice_idx < len(val_y_shape) and slice_idx > 0:
            val_y_reshaped = val_y_shape[slice_idx:]
            var_y_reshaped = val_y.layer_name + '_reshaped'
            attr_reshaped = {
                'shape': val_y_reshaped,
                'name': string(var_y_reshaped)
            }
            node.fluid_code.add_layer(
                'reshape',
                inputs=val_y,
                output=var_y_reshaped,
                param_attr=attr_reshaped)
            inputs = {'x': val_x, 'y': var_y_reshaped}
            node.fluid_code.add_layer(
                op_type, inputs=inputs, output=node, param_attr=attr)
        else:
            inputs = {'x': val_x, 'y': val_y}
            node.fluid_code.add_layer(
                op_type, inputs=inputs, output=node, param_attr=attr)

    @print_mapping_info
    def place_holder(self, node):
        self.input_shapes.append(node.out_shapes[0])

        shape = node.out_shapes[0]
        for i, dim_shape in enumerate(shape):
            if dim_shape == 0 and i == 0:
                shape[i] = 1
            if dim_shape == 0 and i != 0:
                raise ValueError('shape of input is not assigned')
        attr = {
            "dtype": string(node.dtype),
            "shape": shape,
            "name": string(node.layer_name),
            "append_batch_size": 'False'
        }

        node.fluid_code.add_layer(
            "data", inputs=None, output=node, param_attr=attr)

    @print_mapping_info
    def create_parameter(self, node, parameter=None):
        if parameter is not None:
            node = parameter
        dtype = node.dtype
        shape = node.out_shapes[0]
        if len(node.weight.shape) == 0:
            shape = [1]
        self.weights[node.layer_name] = node.weight
        attr = {
            'dtype': string(dtype),
            'shape': shape,
            'name': string(node.layer_name),
            'default_initializer': 'Constant(0.0)'
        }
        if dtype == 'bool':
            attr['dtype'] = string('int64')
            node.fluid_code.add_layer(
                "create_parameter", inputs=None, output=node, param_attr=attr)
            node.fluid_code.add_layer(
                "cast",
                inputs=node,
                output=node,
                param_attr={'dtype': string('bool')})
        elif dtype == 'uint8':
            attr['dtype'] = string('float32')
            node.fluid_code.add_layer(
                "create_parameter", inputs=None, output=node, param_attr=attr)
        else:
            node.fluid_code.add_layer(
                "create_parameter", inputs=None, output=node, param_attr=attr)

    def _pad_if_asymmetric(self, node, pads, val_name):  # pads: SSEE
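        """Split ONNX 'SSEE' pads (all starts, then all ends) into symmetric
        per-dimension pads; for asymmetric padding, emit an explicit pad
        layer and return zero pads plus the padded variable's name."""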
        assert len(pads) & 1 == 0
        symmetric = True
        ndims = len(pads) // 2
        for idx_dim in range(ndims):
            if pads[idx_dim] != pads[ndims + idx_dim]:
                symmetric = False
                break
        if symmetric:
            return pads[:ndims], val_name
        val_padded = self.Pad(node, op_independent=False)
        return [0] * ndims, val_padded

    def _interpolate(self, node):
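        """Shared lowering for Resize/Upsample: pick the scale or target-size
        input according to the opset-dependent input layout, then emit the
        matching resize_* layer."""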
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        inputs = {'input': val_x}
        if node.layer_type == 'Resize':
            if len(node.layer.input) == 2:
                # opset 10
                val_scales = self.graph.get_input_node(node, idx=1, copy=True)
                inputs['scale'] = val_scales
            elif len(node.layer.input) == 3:
                # opset 11
                val_scales = self.graph.get_input_node(node, idx=2, copy=True)
                inputs['scale'] = val_scales
            elif len(node.layer.input) == 4:
                # opset 11
                val_sizes = self.graph.get_input_node(node, idx=3, copy=True)
                var_nc, var_hw = val_sizes.layer_name + '_nc', val_sizes.layer_name + '_hw'
                node.fluid_code.add_layer(
                    'split',
                    inputs=val_sizes,
                    output=var_nc + ',' + var_hw,
                    param_attr={
                        'dim': 0,
                        'num_or_sections': [2, 2],
                    })
                node.fluid_code.add_layer(
                    "cast",
                    inputs=var_hw,
                    output=var_hw,
                    param_attr={'dtype': string('int32')})
                inputs['out_shape'] = var_hw
        elif node.layer_type == 'Upsample':
            val_scales = self.graph.get_input_node(node, idx=1, copy=True)
            inputs['scale'] = val_scales

        attr = {'name': string(node.layer_name)}
        mode = node.get_attr('mode', 'nearest')
        fluid_op = 'resize_{}'.format(mode)
        if 'linear' in mode:
            print(
                'Warning: Paddle does not support resize with mode=linear, '
                'using bilinear instead')
            fluid_op = 'resize_bilinear'
        node.fluid_code.add_layer(
            fluid_op, inputs=inputs, output=node, param_attr=attr)

    @print_mapping_info
    def RoiAlign(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_rois = self.graph.get_input_node(node, idx=1, copy=True)

        pooled_height = node.get_attr('output_height')
        pooled_width = node.get_attr('output_width')
        spatial_scale = node.get_attr('spatial_scale')
        sampling_ratio = node.get_attr('sampling_ratio')
        attr = {
            'pooled_height': pooled_height,
            'pooled_width': pooled_width,
            'spatial_scale': spatial_scale,
            'sampling_ratio': sampling_ratio,
        }
        node.fluid_code.add_layer(
            'roi_align',
            inputs={'input': val_x,
                    'rois': val_rois},
            output=node,
            param_attr=attr)

    @print_mapping_info
    def MaxRoiPool(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_rois = self.graph.get_input_node(node, idx=1, copy=True)

        spatial_scale = node.get_attr('spatial_scale')
        pooled_height, pooled_width = node.get_attr('pooled_shape')
        attr = {
            'pooled_height': pooled_height,
            'pooled_width': pooled_width,
            'spatial_scale': spatial_scale,
        }
        node.fluid_code.add_layer(
            'roi_pool',
            inputs={'input': val_x,
                    'rois': val_rois},
            output=node,
            param_attr=attr)

    @print_mapping_info
    def Pad(self, node, op_independent=True):
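        """Convert ONNX Pad to fluid pad/pad2d. When op_independent is False,
        the result is written to '<layer_name>_paded' and that name is
        returned, so Pad can serve as a preprocessing step for other ops."""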
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        pads = node.get_attr('pads')
        mode = node.get_attr('mode', 'constant')
        value = node.get_attr('value', 0.)
        data_shape = val_x.out_shapes[0]
        output_shape = node.out_shapes[0]
        assume_pad2d = False
        attr = {}
        if len(pads) == 4:
            assume_pad2d |= mode != 'constant'
            if data_shape:
                assume_pad2d |= data_shape and len(data_shape) == 4  # NCHW
            if output_shape:
                assume_pad2d |= output_shape and len(output_shape) == 4  # NCHW
        if assume_pad2d:
            fluid_op = 'pad2d'
            attr['data_format'] = string('NCHW')
            attr['mode'] = string(mode)
        else:
            attr = {'pad_value': value}
            fluid_op = 'pad'
        if len(pads) == 4:
            paddings = np.array(pads).reshape(
                (-1, 2)).transpose().flatten().tolist()  # SSEE -> SESE
        elif len(pads) == 8:
            paddings = np.array(pads).reshape(
                (-1, 4)).transpose().flatten().tolist()  # SSEE -> SESE
            if sum(paddings[:4]) == 0:
                fluid_op = 'pad2d'
                paddings = paddings[4:]
                attr['mode'] = string(mode)
        attr['paddings'] = paddings
        if op_independent:
            attr['name'] = string(node.layer_name)
            node.fluid_code.add_layer(
                fluid_op, inputs=val_x, output=node, param_attr=attr)
        else:
            attr['name'] = string(node.layer_name + '_paded')
            node.fluid_code.add_layer(
                fluid_op,
                inputs=val_x,
                output=node.layer_name + '_paded',
                param_attr=attr)
            return node.layer_name + '_paded'

    @print_mapping_info
    def Unsqueeze(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        axes = node.get_attr('axes')
        attr = {'axes': axes, 'name': string(node.layer_name)}
        if len(val_x.out_shapes[0]) == 0:
            if node.layer_name:
                node.fluid_code.add_layer(
                    'reshape',
                    inputs=val_x,
                    output=node,
                    param_attr={'shape': [1]})
        else:
            node.fluid_code.add_layer(
                'unsqueeze', inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def Shrink(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        bias = node.get_attr('bias')
        lambd = node.get_attr('lambd')
        assert bias == 0.0, 'bias != 0 is not supported'
        attr = {'threshold': lambd, 'name': string(node.layer_name)}
        node.fluid_code.add_layer(
            'hard_shrink', inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def Constant(self, node):
        val_output = self.graph.get_node(node.layer.output[0], copy=True)

        value = node.get_attr('value')
        dtype = np.dtype(value.dtype)
        output_dtype = val_output.dtype
        if output_dtype:
            assert dtype == output_dtype, 'tensor dtype does not match storage dtype'

        shape = node.get_attr('shape', None)

        if shape is None:
            shape = val_output.out_shapes[0]
        if shape is None:
            shape = list(value.shape)
            _logger.warning('in (Constant -> %s): '
                            'attribute "shape" of %s not inferred, '
                            'using value as a 1-D tensor may lead to failure',
                            val_output.layer_name, val_output.layer_name)
        if len(value) == 1:
            value = value.tolist()
            shape = [1]
            value = value[0]
            if dtype.name == 'int64':
                dtype = 'int32'
            attr = {'shape': shape, 'dtype': string(dtype), 'value': value}
            node.fluid_code.add_layer(
                'fill_constant', inputs=None, output=node, param_attr=attr)
        else:
            if dtype.name == 'uint8':
                dtype = 'int64'
            value = np.reshape(value, shape)
            self.weights[node.layer_name] = value
            attr = {
                'dtype': string(dtype),
                'shape': shape,
                'name': string(node.layer_name),
                'default_initializer': 'Constant(0.0)'
            }
            node.fluid_code.add_layer(
                "create_parameter", inputs=None, output=node, param_attr=attr)

    @print_mapping_info
    def Resize(self, node):
        self._interpolate(node)

    @print_mapping_info
    def Upsample(self, node):
        self._interpolate(node)

    @print_mapping_info
    def InstanceNormalization(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_scale = self.graph.get_input_node(node, idx=1, copy=True)
        val_b = self.graph.get_input_node(node, idx=2, copy=True)
        epsilon = node.get_attr('epsilon', 1e-5)
        attr = {
            'epsilon': epsilon,
            'param_attr': string(val_scale.layer_name),
            'bias_attr': string(val_b.layer_name)
        }
        node.fluid_code.add_layer(
            "instance_norm", inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def Expand(self, node):
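        """Emulate ONNX Expand by multiplying val_x with a ones tensor of the
        target shape; elementwise_mul's broadcasting performs the actual
        expansion."""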
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_shape = self.graph.get_input_node(node, idx=1, copy=True)

        if len(val_shape.outputs) == 1:
            self.omit_nodes.append(val_shape.layer_name)

        val_y = self.graph.get_node(node.layer.output[0], copy=True)
        out_shape = node.out_shapes[0]
        val_x_dtype = val_x.dtype

        name_ones = node.layer_name + '_ones'
        attr_ones = {'shape': out_shape, 'dtype': string(val_x_dtype)}
        node.fluid_code.add_layer(
            'ones', inputs=None, output=name_ones, param_attr=attr_ones)
        inputs = {'x': name_ones, 'y': val_x}
        attr = {'name': string(node.layer_name)}
        node.fluid_code.add_layer(
            'elementwise_mul',
            inputs=inputs,
            output=node.layer_name,
            param_attr=attr)

    @print_mapping_info
    def Gather(self, node):
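        """Convert ONNX Gather. fluid's gather handles axis 0 with 1-D
        indices directly; other cases are rewritten via transpose/reshape,
        or via an embedding lookup when gathering rows of a constant
        weight."""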
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        indices = self.graph.get_input_node(node, idx=1, copy=True)
        indices_shape = indices.out_shapes[0]
        axis = node.get_attr('axis', 0)
        #assert len(
        #    indices_shape) <= 2, "Gather op don't support dim of indice >2 "
        if axis == 0 and len(indices_shape) <= 1:
            node.fluid_code.add_layer(
                'gather',
                inputs={'input': val_x,
                        'index': indices},
                output=node,
                param_attr=None)
        elif axis > 0 and len(indices_shape) <= 1:
            perm = list(range(len(val_x.out_shapes[0])))
            perm = [axis] + perm[:axis] + perm[axis + 1:]
            attr_trans = {'perm': perm}
            name_trans = val_x.layer_name + '_trans'
            node.fluid_code.add_layer(
                'transpose',
                inputs=val_x,
                output=name_trans,
                param_attr=attr_trans)
            node.fluid_code.add_layer(
                'gather',
                inputs={'input': name_trans,
                        'index': indices},
                output=node,
                param_attr=None)
            node.fluid_code.add_layer(
                'transpose', inputs=node, output=node, param_attr=attr_trans)
        elif axis == 0 and len(indices_shape) > 1:
            if val_x.out_shapes[0] is not None and isinstance(
                    val_x, ONNXGraphDataNode):
                node.fluid_code.add_layer(
                    'cast',
                    inputs=indices,
                    output=indices,
                    param_attr={'dtype': string('int64')})
                node.fluid_code.add_layer(
                    'embedding',
                    inputs=indices,
                    output=node,
                    use_fluid=True,
                    param_attr={
                        'param_attr': string(val_x.layer_name),
                        'size': val_x.out_shapes[0]
                    })
            else:
                from functools import reduce
                reshape_shape = reduce(lambda x, y: x * y, indices_shape)
                indices_reshape = indices.layer_name + '_shape'
                node.fluid_code.add_layer(
                    'reshape',
                    inputs=indices,
                    output=indices_reshape,
                    param_attr={'shape': [reshape_shape, ]})

                perm = list(range(len(val_x.out_shapes[0])))
                node.fluid_code.add_layer(
                    'gather',
                    inputs={'input': val_x,
                            'index': indices_reshape},
                    output=node,
                    param_attr=None)
                val_x_shape = val_x.out_shapes[0]
                reshaped_shape = []
                for i in perm:
                    reshaped_shape.append(indices_shape[i])
                for i in val_x_shape[:axis] + val_x_shape[axis + 1:]:
                    reshaped_shape.append(i)
                node.fluid_code.add_layer(
                    'reshape',
                    inputs=node,
                    output=node,
                    param_attr={'shape': reshaped_shape})
        elif axis > 0 and len(indices_shape) > 1:
            from functools import reduce
            reshape_shape = reduce(lambda x, y: x * y, indices_shape)
            indices_reshape = indices.layer_name + '_shape'
            node.fluid_code.add_layer(
                'reshape',
                inputs=indices,
                output=indices_reshape,
                param_attr={'shape': [reshape_shape, ]})

            perm = list(range(len(val_x.out_shapes[0])))
            perm = [axis] + perm[:axis] + perm[axis + 1:]
            attr_trans = {'perm': perm}
            name_trans = val_x.layer_name + '_trans'
            node.fluid_code.add_layer(
                'transpose',
                inputs=val_x,
                output=name_trans,
                param_attr=attr_trans)
            node.fluid_code.add_layer(
                'gather',
                inputs={'input': name_trans,
                        'index': indices_reshape},
                output=node,
                param_attr=None)
            node.fluid_code.add_layer(
                'transpose', inputs=node, output=node, param_attr=attr_trans)
            val_x_shape = val_x.out_shapes[0]
            reshaped_shape = []
            for i in perm:
                reshaped_shape.append(indices_shape[i])
            for i in val_x_shape[:axis] + val_x_shape[axis + 1:]:
                reshaped_shape.append(i)
            node.fluid_code.add_layer(
                'reshape',
                inputs=node,
                output=node,
                param_attr={'shape': reshaped_shape})

    @print_mapping_info
    def Range(self, node):
        val_start = self.graph.get_input_node(node, idx=0, copy=True)
        val_limit = self.graph.get_input_node(node, idx=1, copy=True)
        val_delta = self.graph.get_input_node(node, idx=2, copy=True)
        dtype = val_start.dtype
        inputs = {'start': val_start, 'end': val_limit, 'step': val_delta}
        node.fluid_code.add_layer(
            'range',
            inputs=inputs,
            output=node,
            param_attr={'dtype': string(dtype)})

    @print_mapping_info
    def Slice(self, node):
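        """Convert ONNX Slice. starts/ends/axes/steps arrive as attributes
        (opset <= 9) or as extra inputs (opset >= 10); constant inputs are
        folded into layer attributes, otherwise they are passed as int32
        tensors."""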
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        starts, ends, axes, steps = None, None, None, None
        attr = {}
        if len(node.inputs) > 1:
            starts = self.graph.get_input_node(node, idx=1, copy=True)
            ends = self.graph.get_input_node(node, idx=2, copy=True)
            if len(node.inputs) > 3:
                axes = self.graph.get_input_node(node, idx=3, copy=True)
                axes = _const_weight_or_none(axes, necessary=True)
            if len(node.inputs) > 4:
                steps = self.graph.get_input_node(node, idx=4, copy=True)
                steps = _const_weight_or_none(steps)
                if steps is not None:
                    assert steps == 1, 'only steps == 1 is supported for Slice'
            attr = {
                "axes": axes,
                "starts": starts.layer_name,
                "ends": ends.layer_name
            }
            starts_value = _const_weight_or_none(starts)
            ends_value = _const_weight_or_none(ends)
            if starts_value is not None and ends_value is not None:
                self.omit_nodes.append(starts.layer_name)
                self.omit_nodes.append(ends.layer_name)
                ends_value = ends_value.copy()
                for idx in range(len(ends_value)):
                    if ends_value[idx] > 2**31 - 1:
                        ends_value[idx] = 2**31 - 1
                attr = {
                    "axes": axes,
                    "starts": starts_value,
                    "ends": ends_value
                }
            else:
                if starts.dtype != 'int32':
                    node.fluid_code.add_layer(
                        'cast',
                        inputs=starts,
                        output=starts,
                        param_attr={'dtype': string('int32')})
                if ends.dtype != 'int32':
                    node.fluid_code.add_layer(
                        'cast',
                        inputs=ends,
                        output=ends,
                        param_attr={'dtype': string('int32')})
        else:
            starts = node.get_attr('starts')
            ends = node.get_attr('ends')
            axes = node.get_attr('axes')
            for idx in range(len(ends)):
                if ends[idx] > 2**31 - 1:
                    ends[idx] = 2**31 - 1
            attr = {"axes": axes, "starts": starts, "ends": ends}

        node.fluid_code.add_layer(
            'slice', inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def ConstantOfShape(self, node):
        val_shape = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_node(node.layer.output[0], copy=True)

        value = node.get_attr('value')
        dtype = value.dtype
        value = value.tolist()
        assert len(value) == 1, ('the given value is not a scalar '
                                 '(len(value) > 1), which is not supported')
        if len(value) == 1:
            value = value[0]
            if dtype.name == 'int64':
                dtype = 'int32'
            attr = {
                'shape': val_shape.layer_name,
                'dtype': string(dtype),
                'value': value
            }
            node.fluid_code.add_layer(
                'fill_constant', inputs=None, output=node, param_attr=attr)

    @print_mapping_info
    def Split(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_node(node.layer.output[0], copy=True)

        fluid_op = 'split'
        split = node.get_attr('split')
        axis = node.get_attr('axis', 0)
        attr = {
            'num_or_sections': split,
            'dim': axis,
            'name': string(node.layer_name)
        }

        node.fluid_code.add_layer(
            'split', inputs=val_x, output=val_y, param_attr=attr)

    @print_mapping_info
    def Reshape(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_shape = self.graph.get_input_node(node, idx=1, copy=True)
        val_reshaped = self.graph.get_node(node.layer.output[0], copy=True)
        attr = {}
        shape_value = _const_weight_or_none(val_shape)
        shape_dims = len(val_shape.out_shapes[0])

        if shape_value is not None:
            node.fluid_code.add_layer(
                'reshape',
                inputs={'x': val_x},
                output=node,
                param_attr={'shape': shape_value.tolist()})
        elif val_shape.dtype == 'int64':
            val_shape_cast = val_shape.layer_name + '_cast'
            node.fluid_code.add_layer(
                'cast',
                inputs=val_shape,
                output=val_shape_cast,
                param_attr={'dtype': string('int32')})
            # shape may be [], coming from Gather with scalar indices
            if len(val_shape.out_shapes[0]) > 0:
                node.fluid_code.add_layer(
                    'reshape',
                    inputs=val_shape_cast,
                    output=val_shape_cast,
                    param_attr={'shape': val_shape.out_shapes[0]})
            node.fluid_code.add_layer(
                'reshape',
                inputs={'x': val_x,
                        'shape': val_shape_cast},
                output=node,
                param_attr=attr)
        else:
            # shape may be [], coming from Gather with scalar indices
            if len(val_shape.out_shapes[0]) > 0:
                node.fluid_code.add_layer(
                    'reshape',
                    inputs=val_shape,
                    output=val_shape,
                    param_attr={'shape': val_shape.out_shapes[0]})
            node.fluid_code.add_layer(
                'reshape',
                inputs={'x': val_x,
                        'shape': val_shape},
                output=node,
                param_attr=attr)

    @print_mapping_info
    def Cast(self, node):
        val_input = self.graph.get_input_node(node, idx=0, copy=True)
        val_output = self.graph.get_node(node.layer.output[0], copy=True)

        dtype = node.get_attr('to')
        if not isinstance(dtype, np.dtype):
            dtype = TENSOR_TYPE_TO_NP_TYPE[dtype]

        output_dtype = val_output.dtype
        if output_dtype:
            assert dtype == output_dtype, 'dtype of "to" does not match output dtype'
        attr = {'dtype': string(dtype)}
        node.fluid_code.add_layer(
            'cast', inputs=val_input, output=node, param_attr=attr)

    @print_mapping_info
    def AveragePool(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)

        auto_pad = node.get_attr('auto_pad', 'NOTSET')
        kernel_shape = node.get_attr("kernel_shape")
        poolnd = len(kernel_shape)
        strides = node.get_attr("strides")
        pad_mode = node.get_attr("pads")
        ceil_mode = bool(node.get_attr('ceil_mode', 0))
        pads = node.get_attr('pads', [0] * (poolnd * 2))
        fluid_op = 'pool{}d'.format(poolnd)
        assert 2 <= poolnd <= 3, 'only pool2d and pool3d are supported'

        paddings, val_x = self._pad_if_asymmetric(node, pads, val_x)

        if auto_pad == "SAME_UPPER" or auto_pad == "SAME_LOWER":
            input_shape = val_x.out_shapes[0]
            pad_h = _get_same_padding(input_shape[2], kernel_shape[0],
                                      strides[0])
            pad_w = _get_same_padding(input_shape[3], kernel_shape[1],
                                      strides[1])
            paddings = pad_h + pad_w

        attr = {
            "pool_size": kernel_shape,
            "pool_type": string('avg'),
            "pool_stride": strides,
            "pool_padding": paddings,
            "ceil_mode": ceil_mode,
            "exclusive": 'True',
            "name": string(node.layer_name)
        }

        node.fluid_code.add_layer(
            fluid_op, inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def Concat(self, node):
        inputs = []
        for i in range(len(node.layer.input)):
            ipt = self.graph.get_input_node(node, idx=i, copy=True)
            if isinstance(ipt, str):
                inputs.append(ipt)
            else:
                inputs.append(ipt.layer_name)
        axis = node.get_attr('axis')
        attr = {'axis': axis}
        node.fluid_code.add_layer(
            'concat', inputs=inputs, output=node, param_attr=attr)

    @print_mapping_info
    def Flatten(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        axis = node.get_attr('axis', 1)
        attr = {"axis": str(axis), "name": string(node.layer_name)}
        node.fluid_code.add_layer(
            'flatten', inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def Gemm(self, node):
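        """Lower Gemm (alpha * A' * B' + beta * C): the matmul absorbs alpha
        and the transpose flags, and an elementwise_add contributes the bias
        term when beta != 0."""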
        val_a = self.graph.get_input_node(node, idx=0, copy=True)
        val_b = self.graph.get_input_node(node, idx=1, copy=True)
        val_c = self.graph.get_input_node(node, idx=2, copy=True)

        alpha = node.get_attr('alpha', 1.)  # optional
        beta = node.get_attr('beta', 1.)  # optional
        trans_a = bool(node.get_attr('transA', 0))  # optional
        trans_b = bool(node.get_attr('transB', 0))  # optional
        val_mm = node.layer_name + '_mm'
        matmul_inputs = {"x": val_a, "y": val_b}
        attr_matmul = {
            "transpose_x": trans_a,
            "transpose_y": trans_b,
            "alpha": alpha,
            "name": string(val_mm)
        }
        node.fluid_code.add_layer(
            'matmul',
            inputs=matmul_inputs,
            output=val_mm,
            param_attr=attr_matmul)

        if beta != 0:
            if beta == 1.:
                add_inputs = {"x": val_mm, "y": val_c}
                attr = {"name": string(node.layer_name)}
                node.fluid_code.add_layer(
                    "elementwise_add",
                    inputs=add_inputs,
                    output=node,
                    param_attr=attr)
            else:
                var_beta = node.layer_name + '_beta'
                matmul_beta_inputs = {"x": val_c, "y": var_beta}
                node.fluid_code.add_layer(
                    "Constant",
                    inputs=matmul_beta_inputs,
                    output=var_beta,
                    param_attr={'value': beta})

                add_inputs = {"x": val_mm, "y": var_beta}
                attr = {"name": string(node.layer_name)}
                node.fluid_code.add_layer(
                    "elementwise_add",
                    inputs=add_inputs,
                    output=node,
                    param_attr=attr)

    @print_mapping_info
    def Sum(self, node):
        val_inps = node.layer.input
        inputs = {
            "x": self.graph.get_input_node(
                node, idx=0, copy=True),
            "y": self.graph.get_input_node(
                node, idx=1, copy=True),
        }
        node.fluid_code.add_layer("elementwise_add", inputs=inputs, output=node)

        for idx, ipt in enumerate(val_inps[2:]):
            y = self.graph.get_input_node(node, idx=idx + 2, copy=True)
            inputs = {
                "x": node.layer_name,
                "y": y,
            }
            node.fluid_code.add_layer(
                "elementwise_add", inputs=inputs, output=node)

    @print_mapping_info
    def MatMul(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        inputs = {"x": val_x, "y": val_y}
        attr = {"name": string(node.layer_name)}
        node.fluid_code.add_layer(
            "matmul", inputs=inputs, output=node, param_attr=attr)

    @print_mapping_info
    def BatchNormalization(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_scale = self.graph.get_input_node(node, idx=1, copy=True)
        val_b = self.graph.get_input_node(node, idx=2, copy=True)
        val_mean = self.graph.get_input_node(node, idx=3, copy=True)
        val_var = self.graph.get_input_node(node, idx=4, copy=True)

        self.omit_nodes.append(val_scale.layer_name)
        self.omit_nodes.append(val_b.layer_name)
        self.omit_nodes.append(val_mean.layer_name)
        self.omit_nodes.append(val_var.layer_name)

        momentum = node.get_attr('momentum', .9)
        epsilon = node.get_attr('epsilon', 1e-5)

        # Attribute: spatial is used in BatchNormalization-1,6,7
        spatial = bool(node.get_attr('spatial'))
        attr = {
            "momentum": momentum,
            "epsilon": epsilon,
            "data_layout": string('NCHW'),
            "is_test": True,
            "param_attr": string(val_scale.layer_name),
            "bias_attr": string(val_b.layer_name),
            "moving_mean_name": string(val_mean.layer_name),
            "moving_variance_name": string(val_var.layer_name),
            "use_global_stats": spatial,
            "name": string(node.layer_name)
        }
        node.fluid_code.add_layer(
            "batch_norm", inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def Transpose(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        perm = node.get_attr('perm')
        attr = {'perm': perm, "name": string(node.layer_name)}
        node.fluid_code.add_layer(
            "transpose", inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def Relu(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        attr = {"name": string(node.layer_name)}
        node.fluid_code.add_layer(
            "relu", inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def PRelu(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_slope = self.graph.get_input_node(node, idx=1, copy=True)

        mode = 'channel'
        shape_slope = val_slope.out_shapes[0]
        if len(shape_slope) == 1:
            mode = 'all'
        elif len(shape_slope) > 2:
            mode = 'element'
        attr = {
            "param_attr": string(val_slope.layer_name),
            'mode': string(mode)
        }
        node.fluid_code.add_layer(
            "prelu", inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def Squeeze(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        axes = node.get_attr('axes')
        attr = {'axes': axes, "name": string(node.layer_name)}
        if len(val_x.out_shapes[0]) == 1:
            node.fluid_code.add_layer(
                "cast",
                inputs=val_x,
                output=node,
                param_attr={'dtype': string(val_x.dtype)})
        else:
            node.fluid_code.add_layer(
                "squeeze", inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def Equal(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        node.fluid_code.add_layer(
            "equal",
            inputs={'x': val_x,
                    'y': val_y},
            output=node,
            param_attr=None)

    @print_mapping_info
    def Greater(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        node.fluid_code.add_layer(
            "greater_than",
            inputs={'x': val_x,
                    'y': val_y},
            output=node,
            param_attr=None)

    @print_mapping_info
    def Where(self, node):
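        """Build Where from primitives: cast the condition and its logical
        negation to the data dtype, multiply each with its branch, and add
        the two products."""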
        condition = self.graph.get_input_node(node, idx=0, copy=True)
        val_x = self.graph.get_input_node(node, idx=1, copy=True)
        val_y = self.graph.get_input_node(node, idx=2, copy=True)

        not_condition = condition.layer_name + '_not'
        node.fluid_code.add_layer(
            "logical_not",
            inputs=condition,
            output=not_condition,
            param_attr=None)
        cast_not_condition = not_condition + '_cast'
        node.fluid_code.add_layer(
            "cast",
            inputs=not_condition,
            output=cast_not_condition,
            param_attr={'dtype': string(val_x.dtype)})
        cast_condition = condition.layer_name + '_cast'
        node.fluid_code.add_layer(
            "cast",
            inputs=condition,
            output=cast_condition,
            param_attr={'dtype': string(val_x.dtype)})
        mul_val_x = val_x.layer_name + '_mul'
        node.fluid_code.add_layer(
            "elementwise_mul",
            inputs={'x': val_x,
                    'y': cast_condition},
            output=mul_val_x,
            param_attr=None)
        mul_val_y = val_y.layer_name + '_mul'
        node.fluid_code.add_layer(
            "elementwise_mul",
            inputs={'x': val_y,
                    'y': cast_not_condition},
            output=mul_val_y,
            param_attr=None)

        node.fluid_code.add_layer(
            "elementwise_add",
            inputs={'x': mul_val_x,
                    'y': mul_val_y},
            output=node,
            param_attr=None)

    @print_mapping_info
    def NonZero(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_x_dim = len(val_x.out_shapes[0])
        if val_x_dim == 1:
            node.fluid_code.add_layer("nonzero", inputs=val_x, output=val_x)
            node.fluid_code.add_layer(
                "transpose",
                inputs=val_x,
                output=node,
                param_attr={'perm': [1, 0]})
        if val_x_dim > 1:
            node.fluid_code.add_layer("nonzero", inputs=val_x, output=val_x)
            node.fluid_code.add_layer(
                "split",
                inputs=val_x,
                output=val_x,
                param_attr={'num_or_sections': 1,
                            'dim': val_x_dim})
            node.fluid_code.add_layer("concat", inputs=val_x, output=node)

    @print_mapping_info
    def Identity(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        node.fluid_code.add_layer("assign", inputs=val_x, output=node)

    @print_mapping_info
    def Tile(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_repeats = self.graph.get_input_node(node, idx=1, copy=True)
        repeats = _const_weight_or_none(val_repeats)

        if repeats is None:
            repeats = val_repeats.layer_name
        elif isinstance(repeats, int):
            repeats = [repeats]

        attr = {
            'expand_times': repeats,
            "name": string(node.layer_name),
        }
        node.fluid_code.add_layer(
            "expand", inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def MaxPool(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        auto_pad = node.get_attr('auto_pad', 'NOTSET')
        assert node.get_attr(
            "dilations") is None, 'only the default dilations is supported'  # optional

        kernel_shape = node.get_attr("kernel_shape")
        poolnd = len(kernel_shape)
        strides = node.get_attr("strides")
        pad_mode = node.get_attr("pads")
        ceil_mode = bool(node.get_attr('ceil_mode', 0))  # optional
        pads = node.get_attr('pads', [0] * (poolnd * 2))  # optional
        fluid_op = 'pool{}d'.format(poolnd)
        assert 2 <= poolnd <= 3, 'only pool2d and pool3d is supported'

        paddings, val_x = self._pad_if_asymmetric(node, pads, val_x)

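        # SAME_UPPER/SAME_LOWER: derive symmetric padding from the NCHW input
        # shape so each spatial output dim is ceil(in_size / stride).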
        if auto_pad == "SAME_UPPER" or auto_pad == "SAME_LOWER":
            input_shape = val_x.out_shapes[0]
            pad_h = _get_same_padding(input_shape[2], kernel_shape[0],
                                      strides[0])
            pad_w = _get_same_padding(input_shape[3], kernel_shape[1],
                                      strides[1])
            paddings = pad_h + pad_w

        attr = {
            "pool_size": kernel_shape,
            "pool_type": string("max"),
            "pool_stride": strides,
            "pool_padding": paddings,
            "ceil_mode": ceil_mode,
            "name": string(node.layer_name),
            "exclusive": False
        }
        node.fluid_code.add_layer(
            fluid_op, inputs=val_x, output=node, param_attr=attr)

    def _global_pool(self, node):
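        # Shared lowering for GlobalMaxPool/GlobalAveragePool: pool2d with
        # global_pooling=True collapses each feature map to a single value.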
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_node(node.layer.output[0], copy=True)
        fluid_op = 'pool2d'
        pool_type = None
        if node.layer.op_type == 'GlobalMaxPool':
            pool_type = 'max'
        elif node.layer.op_type == 'GlobalAveragePool':
            pool_type = 'avg'

        attr = {
            "pool_type": string(pool_type),
            "global_pooling": True,
            "name": string(node.layer_name)
        }
        node.fluid_code.add_layer(
            fluid_op, inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def GlobalMaxPool(self, node):
        self._global_pool(node)

    @print_mapping_info
    def GlobalAveragePool(self, node):
        self._global_pool(node)

    @print_mapping_info
    def Conv(self, node):
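        # Lower ONNX Conv to fluid conv2d/conv3d; the ONNX weight input is
        # referenced by parameter name instead of being a layer input.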
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_w = self.graph.get_input_node(node, idx=1, copy=True)
        val_y = self.graph.get_node(node.layer.output[0], copy=True)

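        # Weights (and bias, if present) become named Paddle parameters,
        # so their graph nodes are skipped during code generation.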
        self.omit_nodes.append(val_w.layer_name)

        has_bias = len(node.layer.input) == 3
        if has_bias:
            val_b = self.graph.get_input_node(node, idx=2, copy=True)
            self.omit_nodes.append(val_b.layer_name)
        auto_pad = node.get_attr('auto_pad', 'NOTSET')

        kernel_shape = node.get_attr('kernel_shape')
        convnd = len(kernel_shape)
        assert 2 <= convnd <= 3, 'only conv2d and conv3d are supported'
        num_out_channels = val_w.out_shapes[0][0]
        fluid_op = 'conv{}d'.format(convnd)

        num_groups = node.get_attr('group', 1)
        strides = node.get_attr('strides', [1] * convnd)
        dilations = node.get_attr('dilations', [1] * convnd)
        pads = node.get_attr('pads', [0] * (convnd * 2))

        input_shape = val_x.out_shapes[0]
        paddings, val_x = self._pad_if_asymmetric(node, pads, val_x)

        if auto_pad == "SAME_UPPER" or auto_pad == "SAME_LOWER":
            pad_h = _get_same_padding(input_shape[2], kernel_shape[0],
                                      strides[0])
            pad_w = _get_same_padding(input_shape[3], kernel_shape[1],
                                      strides[1])
            paddings = pad_h + pad_w

        attr = {
            "num_filters": num_out_channels,
            "filter_size": kernel_shape,
            "stride": strides,
            "padding": paddings,
            "dilation": dilations,
            "groups": num_groups,
            'param_attr': string(val_w.layer_name),
            "name": string(node.layer_name)
        }
        if has_bias:
            attr["bias_attr"] = string(val_b.layer_name)
        else:
            attr["bias_attr"] = False
        node.fluid_code.add_layer(
            fluid_op, inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def ConvTranspose(self, node):
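        # Lower ONNX ConvTranspose to fluid conv2d_transpose/conv3d_transpose.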
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_w = self.graph.get_input_node(node, idx=1, copy=True)
        val_b = None
        if len(node.layer.input) > 2:
            val_b = self.graph.get_input_node(node, idx=2, copy=True)
            self.omit_nodes.append(val_b.layer_name)
        self.omit_nodes.append(val_w.layer_name)

        val_y = self.graph.get_node(node.layer.output[0], copy=True)

        auto_pad = node.get_attr('auto_pad', 'NOTSET')
        out_padding = node.get_attr('output_padding', [0, 0])
        kernel_shape = node.get_attr('kernel_shape')
        assert kernel_shape, 'kernel_shape not inferred'
        convnd = len(kernel_shape)
        assert 2 <= convnd <= 3, 'only conv2d_transpose and conv3d_transpose are supported'
        num_out_channels = val_w.out_shapes[0][1]
        fluid_op = 'conv{}d_transpose'.format(convnd)

        num_groups = node.get_attr('group', 1)
        strides = node.get_attr('strides', [1] * convnd)
        dilations = node.get_attr('dilations', [1] * convnd)
        output_size = node.get_attr('output_shape', [])
        pads = node.get_attr('pads', [0] * (convnd * 2))

        paddings, val_x = self._pad_if_asymmetric(node, pads, val_x)

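        # Note: the 'output_shape' attribute read above is discarded; the
        # output size is always recomputed from the input shape below.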
        output_size = [0, 0]

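        # out = (in - 1) * stride - 2 * pad + dilation * (kernel - 1) + 1 + output_padding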
        output_size[0] = (val_x.out_shapes[0][2] - 1
                          ) * strides[0] - 2 * paddings[0] + dilations[0] * (
                              kernel_shape[0] - 1) + 1 + out_padding[0]
        output_size[1] = (val_x.out_shapes[0][3] - 1
                          ) * strides[1] - 2 * paddings[1] + dilations[1] * (
                              kernel_shape[1] - 1) + 1 + out_padding[1]
        attr = {
            'num_filters': num_out_channels,
            'output_size': output_size or None,
            'filter_size': kernel_shape,
            'padding': paddings,
            'stride': strides,
            'dilation': dilations,
            'groups': num_groups,
            'param_attr': string(val_w.layer_name),
            'bias_attr': None if val_b is None else string(val_b.layer_name),
            'name': string(node.layer_name),
        }
        node.fluid_code.add_layer(
            fluid_op, inputs=val_x, output=node, param_attr=attr)