Commit 1e32a717 authored by haonanyu, committed by emailweixu

parameter sharing in fluid with simple test cases

Parent cb920ff1
# tilde
*~
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
......
# RL
Reinforcement learning framework
# PPRL
PaddlePaddle Reinforcement Learning Framework
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This file wraps Fluid layers that have parameters to support parameter sharing.
For other layers that don't have parameters, we simply copy them to this namespace.
"""
from paddle.fluid.layers import *
from layer_wrappers import *
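For orientation, here is a minimal usage sketch distilled from the test cases added in this commit; it is not part of the committed files and simply assumes the wrapped layers (layers.fc, layers.embedding, the set_paras argument) behave as exercised in those tests: a wrapped layer object owns its parameters, so applying it twice reuses the same weights, and set_paras binds another layer to an existing parameter set.

# Minimal sketch (illustration only, based on the tests below).
import numpy as np
import paddle.fluid as fluid
import pprl.layers as layers

fc = layers.fc(64, bias_attr=False)
# Bind the embedding's parameters to fc's, so the two layers share weights.
emb = layers.embedding((100, 64), set_paras=fc.parameters())

main_program = fluid.Program()
startup_program = fluid.Program()
with fluid.program_guard(main_program, startup_program):
    x = layers.data(name='x', shape=[100], dtype="float32")
    y1 = fc(input=x)  # first application of fc
    y2 = fc(input=x)  # second application reuses the same weight matrix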
This diff is collapsed.
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import pprl.layers as layers


class TestParamName(unittest.TestCase):
    def test_name_number(self):
        self.fc1 = layers.fc(100)
        self.fc2 = layers.fc(100)
        self.fc3 = layers.fc(100, bias_attr=False)
        self.fc4 = layers.fc(100, param_attr=False)
        self.fc5 = layers.fc(100, name="fc", bias_attr=False)
        self.embedding = layers.embedding((100, 128))
        self.embedding_custom = layers.embedding(
            (100, 128), name="embedding_custom")
        self.conv2d = layers.conv2d(
            num_filters=64,
            filter_size=3,
            name="my_conv2d",
            set_paras=self.embedding.parameters())
        self.dynamic_grus = []
        for i in range(5):
            self.dynamic_grus.append(layers.dynamic_gru(50))

        ## fc1 and fc2 have different parameters
        self.assertEqual(self.fc1.param_name, "fc_0.w")
        self.assertEqual(self.fc2.param_name, "fc_1.w")
        ## fc3 has no bias and fc4 has no param, so those names are None
        self.assertEqual(self.fc3.bias_name, None)
        self.assertEqual(self.fc4.param_name, None)
        ## fc5 has a custom name and no bias;
        ## its param name differs from fc1's
        self.assertEqual(self.fc5.param_name, "fc_0_.w")
        self.assertEqual(self.fc5.bias_name, None)
        ## the embedding layer has no bias
        self.assertEqual(self.embedding.param_name, "embedding_0.w")
        self.assertEqual(self.embedding.bias_name, None)
        ## embedding layer with a custom name; the custom id is 1 at this point
        self.assertEqual(self.embedding_custom.param_name,
                         "embedding_custom_1_.w")
        ## conv2d shares its param with embedding and has a custom bias name;
        ## the custom id is 2 now
        self.assertEqual(self.conv2d.param_name, "embedding_0.w")
        self.assertEqual(self.conv2d.bias_name, "my_conv2d_2_.wbias")
        for i, gru in enumerate(self.dynamic_grus):
            self.assertEqual(gru.param_name, "dynamic_gru_%d.w" % i)


if __name__ == '__main__':
    unittest.main()
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import pprl.layers as layers
import paddle.fluid as fluid
import numpy as np


class TestParamSharing(unittest.TestCase):
    def __init__(self, *args, **kwargs):
        super(TestParamSharing, self).__init__(*args, **kwargs)
        self.fc1 = layers.fc(64, bias_attr=False)
        self.fc2 = layers.fc(64, bias_attr=False)
        self.fc3 = layers.fc(64, name="fc")
        self.fc4 = layers.fc(64, name="fc")
        ## bind the parameters of self.embedding to those of self.fc1
        self.embedding = layers.embedding(
            (100, 64), set_paras=self.fc1.parameters())

    def test_param_sharing(self):
        """
        Test case for parameter sharing between layers of the same type
        """
        main_program = fluid.Program()
        startup_program = fluid.Program()
        with fluid.program_guard(main_program, startup_program):
            x = layers.data(name='x', shape=[100], dtype="float32")
            y1 = self.fc1(input=x)
            y11 = self.fc1(input=x)
            y2 = self.fc2(input=x)
            y3 = self.fc3(input=x)
            y4 = self.fc4(input=x)

        place = fluid.CPUPlace()
        exe = fluid.Executor(place)
        exe.run(startup_program)

        batch_size = 10
        input_x = np.random.uniform(0, 1, [batch_size, 100]).astype("float32")
        outputs = exe.run(main_program,
                          feed={"x": input_x},
                          fetch_list=[y1, y11, y2, y3, y4])
        ## the same layer applied twice to the same input gives identical outputs
        self.assertEqual(
            np.sum(outputs[0].flatten()), np.sum(outputs[1].flatten()))
        ## fc1 and fc2 are distinct layers, so their outputs differ
        self.assertNotEqual(
            np.sum(outputs[1].flatten()), np.sum(outputs[2].flatten()))
        ## fc3 and fc4 are distinct layers despite sharing the name "fc"
        self.assertNotEqual(
            np.sum(outputs[3].flatten()), np.sum(outputs[4].flatten()))

    def test_manual_param_sharing(self):
        """
        Test case for parameter sharing between layers of different types
        """
        batch_size = 10
        dict_size = 100
        main_program = fluid.Program()
        startup_program = fluid.Program()
        with fluid.program_guard(main_program, startup_program):
            x = layers.data(name='x', shape=[1], dtype="int")
            cx = layers.cast(
                x=layers.one_hot(
                    input=x, depth=dict_size), dtype="float32")
            ## fc1 was created without a bias because the embedding layer has none
            y1 = self.fc1(input=cx)
            y2 = self.embedding(input=x)

        place = fluid.CPUPlace()
        exe = fluid.Executor(place)
        exe.run(startup_program)

        input_x = np.random.randint(
            dict_size, size=(batch_size, 1)).astype("int")
        outputs = exe.run(main_program,
                          feed={'x': input_x},
                          fetch_list=[y1, y2])
        ## one-hot followed by fc with shared weights equals the embedding lookup
        self.assertEqual(
            np.sum(outputs[0].flatten()), np.sum(outputs[1].flatten()))


if __name__ == "__main__":
    unittest.main()