dygraph

Hi,

Not sure why this happens.

I defined a class as follows, subclassing fluid.dygraph.Layer.

import paddle.fluid as fluid
from paddle.fluid.dygraph import Conv2DTranspose, BatchNorm, to_variable

class DeConv2D(fluid.dygraph.Layer):
    def __init__(self,
            name_scope,
            num_filters=64,
            filter_size=7,
            stride=1,
            stddev=0.02,
            padding=[0,0],
            outpadding=[0,0,0,0],
            relu=True,
            norm=True,
            relufactor=0.0,
            use_bias=False
            ):
        super(DeConv2D,self).__init__(name_scope)

        if use_bias == False:
            de_bias_attr = False
        else:
            de_bias_attr = fluid.ParamAttr(name="de_bias",initializer=fluid.initializer.Constant(0.0))

        self._deconv = Conv2DTranspose(self.full_name(),
                                        num_filters,
                                        filter_size=filter_size,
                                        stride=stride,
                                        padding=padding,
                                        param_attr=fluid.ParamAttr(
                                            name="this_is_deconv_weights",
                                            initializer=fluid.initializer.NormalInitializer(loc=0.0, scale=stddev)),
                                        bias_attr=de_bias_attr)



        if norm:
            self.bn = BatchNorm(self.full_name(),
                num_channels=num_filters,
                param_attr=fluid.ParamAttr(
                    name="de_wights",
                    initializer=fluid.initializer.NormalInitializer(1.0, 0.02)),
                bias_attr=fluid.ParamAttr(name="de_bn_bias",initializer=fluid.initializer.Constant(0.0)),
                trainable_statistics=True)        
        self.outpadding = outpadding
        self.relufactor = relufactor
        self.use_bias = use_bias
        self.norm = norm
        self.relu = relu

    def forward(self,inputs):
        #todo: add use_bias
        #if self.use_bias==False:
        with fluid.dygraph.guard():
            conv = self._deconv(inputs)
            # else:
            #     conv = self._deconv(inputs)
            conv = fluid.layers.pad2d(conv, paddings=self.outpadding, mode='constant', pad_value=0.0)
            conv = to_variable(conv)
            if self.norm:
                conv = self.bn(conv)
            if self.relu:
                conv = fluid.layers.leaky_relu(conv,alpha=self.relufactor)
        return conv

But I got the following error when I called this class from a generator defined as follows:

class generator(fluid.dygraph.Layer):
    # Network Architecture is exactly same as in infoGAN (https://arxiv.org/abs/1606.03657)
    # Architecture : FC1024_BR-FC7x7x128_BR-(64)4dc2s_BR-(1)4dc2s_S
    # from the main, we can see that input_dim=62, input_size=32 and output_dim=1
    def __init__(self, name_scope, input_dim=62, output_dim=1, input_size=32, norm=True):
        super(generator, self).__init__(name_scope)
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.input_size = input_size

        self.fc = Linear(name_scope=name_scope + '_fc', input_size=self.input_dim,
                         output_size=128 * (self.input_size // 8) * (self.input_size // 8))
        ##128 * (self.input_size // 8) * (self.input_size // 8)
        if norm:
            with fluid.dygraph.guard():
                self.bn = BatchNorm(self.full_name(),
                    num_channels=128 * (self.input_size // 8) * (self.input_size // 8),
                    param_attr=fluid.ParamAttr(
                        name="scale",
                        initializer=fluid.initializer.NormalInitializer(1.0,0.02)),
                    bias_attr=fluid.ParamAttr(
                        name="bias",
                        initializer=fluid.initializer.Constant(0.0)),
                    trainable_statistics=True
                    )

        self.deconv = DeConv2D(self.full_name(),
                               num_filters=128,
                               filter_size=4,
                               stride=2,
                               stddev=0.02,
                               padding=[1, 1],
                               outpadding=[0, 1, 0, 1]
                               )

    def forward(self, input):
        x = self.fc(input)
        x = self.bn(x)
        x = fluid.layers.relu(x)
        x = fluid.layers.reshape(x, [-1, 128, (self.input_size // 8), (self.input_size // 8)])
        x = self.deconv(to_variable(x))
        #print('size of x in forward discriminator:{}'.format(x.shape))
        return x
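
For reference, a fluid dygraph model like this is typically constructed and called inside a single fluid.dygraph.guard() scope, with to_variable used only on the initial numpy input. A rough sketch, assuming the PaddlePaddle 1.x fluid API (the batch size and random noise below are just illustrative):

import numpy as np
import paddle.fluid as fluid
from paddle.fluid.dygraph import to_variable

# illustrative batch: 8 noise vectors of length input_dim=62 (the class default)
noise = np.random.random((8, 62)).astype('float32')

with fluid.dygraph.guard():
    g = generator("g")
    # to_variable is only needed here, to wrap the initial numpy array;
    # tensors produced by the layers inside forward are already Variables
    fake = g(to_variable(noise))
    print(fake.shape)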

The error information is as follows.

PaddleCheckError: unsupported type , must be Variable, list[Variable] or tuple[Variable] at [/paddle/paddle/fluid/pybind/imperative.cc:143]

The error line is conv = self.bn(conv).
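
One quick way to narrow this down is to print the type of the tensor right before the call that fails; if it is not a dygraph Variable (for example, still a numpy array or None), the check in imperative.cc rejects it. A small debugging sketch, with variable names mirroring the code above:

# temporary check inside DeConv2D.forward, just before the suspected line
print(type(conv))   # a dygraph tensor should show up as a VarBase / Variable
conv = self.bn(conv)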

Thanks,

All comments (2)
AIStudio784461
#2 · Replied 2020-01

Sorry, the error line is not the self.bn line; it is actually:
x = self.deconv(to_variable(x))
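
If that is the failing line, one likely culprit is the extra to_variable: x already comes out of fluid.layers.reshape as a dygraph Variable, and to_variable is meant for numpy arrays, so on some 1.x releases re-wrapping an existing Variable can leave the tracer with an input it rejects. A minimal sketch of generator.forward with the redundant call removed (everything else unchanged):

def forward(self, input):
    x = self.fc(input)
    x = self.bn(x)
    x = fluid.layers.relu(x)
    x = fluid.layers.reshape(x, [-1, 128, self.input_size // 8, self.input_size // 8])
    # x is already a dygraph Variable here, so pass it to the sub-layer directly
    x = self.deconv(x)
    return x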

AIStudio784461
#4 · Replied 2020-01

Thanks, the problem is resolved now, and I think your suggestion was on the right track. Best,
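
For completeness, the same reasoning applies inside DeConv2D.forward: fluid.dygraph.guard() normally wraps the outer training or inference loop rather than a single forward call, and fluid.layers.pad2d already returns a Variable, so the extra to_variable there can also be dropped. A sketch of the tidied method, assuming the rest of the class stays as posted:

def forward(self, inputs):
    conv = self._deconv(inputs)
    conv = fluid.layers.pad2d(conv, paddings=self.outpadding,
                              mode='constant', pad_value=0.0)
    if self.norm:
        conv = self.bn(conv)
    if self.relu:
        conv = fluid.layers.leaky_relu(conv, alpha=self.relufactor)
    return conv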
