Merge pull request #966 from qingqing01/batch_norm
Fix bug in config_parser.py when batch_norm layer is used in RecurrentLayerGroup
qingqing01 authored Dec 21, 2016
2 parents 446e3c2 + 567871f commit bbf3b47
Showing 1 changed file with 12 additions and 4 deletions.
python/paddle/trainer/config_parser.py
@@ -498,9 +498,16 @@ def __init__(
             is_static=None,
             is_shared=None,
             update_hooks=None,
-            input_layer_argument=None, ):
+            input_layer_argument=None,
+            make_layer_name_in_submodel=True, ):
+        """
+        @param make_layer_name_in_submodel: True by default; you might need
+        to set it carefully when adding an Input in config_parser.py.
+        """
         self.add_keys(locals())
-        self.input_layer_name = MakeLayerNameInSubmodel(input_layer_name)
+        self.input_layer_name = MakeLayerNameInSubmodel(
+            input_layer_name
+        ) if make_layer_name_in_submodel else input_layer_name
 
 
 # Define a projection for mixed layer
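
Why the new flag is needed: inside a submodel such as a RecurrentLayerGroup, layer names are rewritten with a submodel suffix, but the static parameters a batch_norm layer registers must keep their original names so config parsing can resolve them. A minimal self-contained sketch of the conditional above; mangle_name and resolve_input_name are hypothetical stand-ins for MakeLayerNameInSubmodel and the patched Input logic, not PaddlePaddle's real internals:

def mangle_name(name, submodel):
    # Hypothetical stand-in for MakeLayerNameInSubmodel: inside a
    # submodel, a layer name becomes "<name>@<submodel>".
    return "%s@%s" % (name, submodel) if submodel else name

def resolve_input_name(name, submodel, make_layer_name_in_submodel=True):
    # Mirrors the patched logic: apply the suffix only when the flag is set.
    return mangle_name(name, submodel) if make_layer_name_in_submodel else name

print(resolve_input_name("bn_param", "rnn_group"))         # bn_param@rnn_group
print(resolve_input_name("bn_param", "rnn_group", False))  # bn_param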
@@ -1848,7 +1855,8 @@ def __init__(self,
                 initial_std=0.0,
                 initial_mean=0.0,
                 is_static=True,
-                is_shared=is_shared, ))
+                is_shared=is_shared,
+                make_layer_name_in_submodel=False, ))
 
         parallel_nn = bool(int(g_command_config_args.get("parallel_nn", 0)))
         cudnn_version = int(g_command_config_args.get("cudnn_version", 0))
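
For context, a standalone sketch of what this hunk changes: the batch_norm config class appends its moving mean and variance statistics as extra static inputs, and the new argument keeps those parameter inputs' names unmangled. The Input stub, the loop, and all names here are assumptions for illustration, not PaddlePaddle's actual classes:

class Input(object):
    # Hypothetical stub mimicking the config class patched above.
    def __init__(self, input_layer_name, initial_std=None, initial_mean=None,
                 is_static=False, is_shared=False,
                 make_layer_name_in_submodel=True):
        self.input_layer_name = input_layer_name
        self.make_layer_name_in_submodel = make_layer_name_in_submodel

inputs = [Input("batch_norm_input")]
is_shared = False
for _ in range(2):  # one static input each for moving mean and variance
    inputs.append(
        Input(
            inputs[0].input_layer_name,
            initial_std=0.0,
            initial_mean=0.0,
            is_static=True,
            is_shared=is_shared,
            make_layer_name_in_submodel=False, ))
print([i.make_layer_name_in_submodel for i in inputs])  # [True, False, False]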
@@ -1880,7 +1888,7 @@ def __init__(self,
         # when either of them is non-zero.
         if input_layer.width != 0 or input_layer.height != 0:
             self.set_cnn_layer(name, image_conf.img_size_y, image_conf.img_size,
-                               image_conf.channels, True)
+                               image_conf.channels, False)
         else:
             self.set_layer_size(input_layer.size)
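
The scenario this commit fixes can be written with the v1 trainer-config API; a hedged usage sketch (layer names and sizes are illustrative, and it assumes a PaddlePaddle build from around this commit):

from paddle.trainer_config_helpers import *

def step(input):
    # batch_norm evaluated inside the step function runs in a
    # RecurrentLayerGroup; before this fix, its static parameter inputs
    # received submodel-mangled names that could not be resolved.
    return batch_norm_layer(input=fc_layer(input=input, size=128))

seq = data_layer(name="seq", size=128)
out = recurrent_group(name="rnn", step=step, input=seq)
outputs(out)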
