Commit e3340a1e authored by michaelowenliu

update syncbn package and modify parameters in syncbn

Parent fe1b91b7
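In short, every SyncBatchNorm construction moves from the paddle.fluid.dygraph API to paddle.nn, and its constructor arguments change with it. A minimal sketch of the mapping this diff applies (the feature count 64 and the "demo" parameter names are illustrative only, not taken from the repo):

# Old fluid-style call, as removed in the hunks below:
#   BatchNorm(num_channels=..., act=..., param_attr=..., bias_attr=...,
#             moving_mean_name=..., moving_variance_name=...)
# New paddle.nn.SyncBatchNorm call, as added below:
#   num_channels -> num_features, param_attr -> weight_attr,
#   act removed (activation becomes a separate layer_utils.Activation step),
#   moving_mean_name / moving_variance_name removed (the running statistics
#   are created internally and no longer named by the caller).
import paddle
from paddle.fluid.param_attr import ParamAttr
from paddle.nn import SyncBatchNorm as BatchNorm

paddle.disable_static()  # dygraph mode, which this repo's dygraph code assumes
bn = BatchNorm(
    num_features=64,
    weight_attr=ParamAttr(name="demo_bn_scale"),
    bias_attr=ParamAttr(name="demo_bn_offset"))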
@@ -16,16 +16,17 @@ from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
+import math
 import numpy as np
 import paddle
 import paddle.fluid as fluid
 from paddle.fluid.param_attr import ParamAttr
 from paddle.fluid.layer_helper import LayerHelper
 from paddle.fluid.dygraph.nn import Conv2D, Pool2D, Linear, Dropout
-from paddle.fluid.dygraph import SyncBatchNorm as BatchNorm
-import math
+from paddle.nn import SyncBatchNorm as BatchNorm
 from dygraph.models.architectures import layer_utils
 from dygraph.cvlibs import manager
 __all__ = [
@@ -252,19 +253,18 @@ class ConvBNLayer(fluid.dygraph.Layer):
             bias_attr=False,
             use_cudnn=use_cudnn,
             act=None)
-        self.bn = fluid.dygraph.BatchNorm(
-            num_channels=out_c,
-            act=None,
-            param_attr=ParamAttr(
+        self.bn = BatchNorm(
+            num_features=out_c,
+            weight_attr=ParamAttr(
                 name=name + "_bn_scale",
                 regularizer=fluid.regularizer.L2DecayRegularizer(
                     regularization_coeff=0.0)),
             bias_attr=ParamAttr(
                 name=name + "_bn_offset",
                 regularizer=fluid.regularizer.L2DecayRegularizer(
-                    regularization_coeff=0.0)),
-            moving_mean_name=name + "_bn_mean",
-            moving_variance_name=name + "_bn_variance")
+                    regularization_coeff=0.0)))
+        self._act_op = layer_utils.Activation(act=None)
     def forward(self, x):
         x = self.conv(x)
@@ -25,10 +25,10 @@ import paddle.fluid as fluid
 from paddle.fluid.param_attr import ParamAttr
 from paddle.fluid.layer_helper import LayerHelper
 from paddle.fluid.dygraph.nn import Conv2D, Pool2D, Linear, Dropout
-from paddle.fluid.dygraph import SyncBatchNorm as BatchNorm
+from paddle.nn import SyncBatchNorm as BatchNorm
 from dygraph.utils import utils
 from dygraph.models.architectures import layer_utils
 from dygraph.cvlibs import manager
 __all__ = [
@@ -70,17 +70,17 @@ class ConvBNLayer(fluid.dygraph.Layer):
         bn_name = "bn" + name[3:]
         self._batch_norm = BatchNorm(
             num_filters,
-            act=act,
-            param_attr=ParamAttr(name=bn_name + '_scale'),
-            bias_attr=ParamAttr(bn_name + '_offset'),
-            moving_mean_name=bn_name + '_mean',
-            moving_variance_name=bn_name + '_variance')
+            weight_attr=ParamAttr(name=bn_name + '_scale'),
+            bias_attr=ParamAttr(bn_name + '_offset'))
+        self._act_op = layer_utils.Activation(act=act)
     def forward(self, inputs):
         if self.is_vd_mode:
             inputs = self._pool2d_avg(inputs)
         y = self._conv(inputs)
         y = self._batch_norm(y)
+        y = self._act_op(y)
         return y
@@ -3,8 +3,9 @@ import paddle.fluid as fluid
 from paddle.fluid.param_attr import ParamAttr
 from paddle.fluid.layer_helper import LayerHelper
 from paddle.fluid.dygraph.nn import Conv2D, Pool2D, Linear, Dropout
-from paddle.fluid.dygraph import SyncBatchNorm as BatchNorm
+from paddle.nn import SyncBatchNorm as BatchNorm
 from dygraph.models.architectures import layer_utils
 from dygraph.cvlibs import manager
 __all__ = ["Xception41_deeplab", "Xception65_deeplab", "Xception71_deeplab"]
@@ -80,17 +81,17 @@ class ConvBNLayer(fluid.dygraph.Layer):
             param_attr=ParamAttr(name=name + "/weights"),
             bias_attr=False)
         self._bn = BatchNorm(
-            num_channels=output_channels,
-            act=act,
+            num_features=output_channels,
             epsilon=1e-3,
             momentum=0.99,
-            param_attr=ParamAttr(name=name + "/BatchNorm/gamma"),
-            bias_attr=ParamAttr(name=name + "/BatchNorm/beta"),
-            moving_mean_name=name + "/BatchNorm/moving_mean",
-            moving_variance_name=name + "/BatchNorm/moving_variance")
+            weight_attr=ParamAttr(name=name + "/BatchNorm/gamma"),
+            bias_attr=ParamAttr(name=name + "/BatchNorm/beta"))
+        self._act_op = layer_utils.Activation(act=act)
     def forward(self, inputs):
-        return self._bn(self._conv(inputs))
+        return self._act_op(self._bn(self._conv(inputs)))
 class Seperate_Conv(fluid.dygraph.Layer):
@@ -116,13 +117,13 @@ class Seperate_Conv(fluid.dygraph.Layer):
             bias_attr=False)
         self._bn1 = BatchNorm(
             input_channels,
-            act=act,
             epsilon=1e-3,
             momentum=0.99,
-            param_attr=ParamAttr(name=name + "/depthwise/BatchNorm/gamma"),
-            bias_attr=ParamAttr(name=name + "/depthwise/BatchNorm/beta"),
-            moving_mean_name=name + "/depthwise/BatchNorm/moving_mean",
-            moving_variance_name=name + "/depthwise/BatchNorm/moving_variance")
+            weight_attr=ParamAttr(name=name + "/depthwise/BatchNorm/gamma"),
+            bias_attr=ParamAttr(name=name + "/depthwise/BatchNorm/beta"))
+        self._act_op1 = layer_utils.Activation(act=act)
         self._conv2 = Conv2D(
             input_channels,
             output_channels,
@@ -134,19 +135,21 @@ class Seperate_Conv(fluid.dygraph.Layer):
             bias_attr=False)
         self._bn2 = BatchNorm(
             output_channels,
-            act=act,
             epsilon=1e-3,
             momentum=0.99,
-            param_attr=ParamAttr(name=name + "/pointwise/BatchNorm/gamma"),
-            bias_attr=ParamAttr(name=name + "/pointwise/BatchNorm/beta"),
-            moving_mean_name=name + "/pointwise/BatchNorm/moving_mean",
-            moving_variance_name=name + "/pointwise/BatchNorm/moving_variance")
+            weight_attr=ParamAttr(name=name + "/pointwise/BatchNorm/gamma"),
+            bias_attr=ParamAttr(name=name + "/pointwise/BatchNorm/beta"))
+        self._act_op2 = layer_utils.Activation(act=act)
     def forward(self, inputs):
         x = self._conv1(inputs)
         x = self._bn1(x)
+        x = self._act_op1(x)
         x = self._conv2(x)
         x = self._bn2(x)
+        x = self._act_op2(x)
         return x
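Taken together, each changed layer now follows the same pattern: conv, then paddle.nn.SyncBatchNorm with no act argument, then a separate activation step (the repo's layer_utils.Activation). A minimal self-contained sketch of that pattern, assuming the transitional Paddle version this repo targets (where paddle.fluid and paddle.nn coexist); the class, names, and relu stand-in below are illustrative, not code from the repo:

import paddle
import paddle.fluid as fluid
from paddle.fluid.param_attr import ParamAttr
from paddle.fluid.dygraph.nn import Conv2D
from paddle.nn import SyncBatchNorm as BatchNorm


class ConvBNAct(fluid.dygraph.Layer):
    """conv -> SyncBatchNorm -> activation, mirroring the post-commit layers."""

    def __init__(self, in_c, out_c, filter_size, name):
        super(ConvBNAct, self).__init__()
        self._conv = Conv2D(
            num_channels=in_c,
            num_filters=out_c,
            filter_size=filter_size,
            padding=(filter_size - 1) // 2,
            param_attr=ParamAttr(name=name + "_weights"),
            bias_attr=False,
            act=None)
        # paddle.nn.SyncBatchNorm: positional num_features plus weight_attr /
        # bias_attr; no act and no moving_*_name arguments.
        self._bn = BatchNorm(
            out_c,
            weight_attr=ParamAttr(name=name + "_bn_scale"),
            bias_attr=ParamAttr(name=name + "_bn_offset"))

    def forward(self, x):
        x = self._conv(x)
        x = self._bn(x)
        # Activation is applied as its own step; the repo wraps this in
        # layer_utils.Activation(act=...), a plain relu stands in here.
        return fluid.layers.relu(x)


if __name__ == "__main__":
    paddle.disable_static()
    layer = ConvBNAct(in_c=3, out_c=16, filter_size=3, name="demo")
    out = layer(paddle.rand([1, 3, 32, 32]))
    print(out.shape)  # [1, 16, 32, 32]

Note that SyncBatchNorm only synchronizes statistics across devices under multi-GPU data-parallel training; on a single card it behaves like ordinary batch normalization.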