首页 Paddle框架 帖子详情
No Output(X@GRAD) found for BatchNormGrad operator.错误。求助
收藏
快速回复
Paddle框架 问答深度学习 593 2
No Output(X@GRAD) found for BatchNormGrad operator.错误。求助
收藏
快速回复
Paddle框架 问答深度学习 593 2

#
class DNN(paddle.nn.Layer):
    """Fully-connected classifier: BatchNorm2D on the raw input, then five
    Linear layers with sigmoid activations between them.

    Args:
        num_classes: width of the final output layer (defaults to the
            module-level ``num_classes``).
        input_dim: multiplier for the flattened feature size; fc1 expects
            ``64 * 96 * input_dim`` features, matching an input of shape
            (batch, input_dim, 64, 96) — presumably input_dim == 1 given
            the logged shape [32, 1, 64, 96]; TODO confirm.
    """

    def __init__(self, num_classes=num_classes, input_dim=input_dim):
        super(DNN, self).__init__()
        # Normalize the raw (un-preprocessed) input over its single channel.
        self._norm_layer = nn.BatchNorm2D(1)
        self.fc1 = Linear(in_features=64 * 96 * input_dim, out_features=1000)
        self.fc2 = Linear(in_features=1000, out_features=1000)
        self.fc3 = Linear(in_features=1000, out_features=100)
        self.fc4 = Linear(in_features=100, out_features=10)
        self.fc5 = Linear(in_features=10, out_features=num_classes)

    def forward(self, input):
        """Run a forward pass; returns raw logits of shape (batch, num_classes)."""
        # FIX for "RuntimeError: (NotFound) No Output(X@GRAD) found for
        # BatchNormGrad operator": the DataLoader tensor arrives with
        # stop_gradient=True (see the logged tensor), and when BatchNorm2D is
        # the very first op, some Paddle versions cannot build its backward
        # pass without an X@GRAD output. Letting the input require a gradient
        # gives BatchNormGrad that output. (Alternative, as noted in the
        # thread: move normalization into data preprocessing.)
        input.stop_gradient = False
        x = self._norm_layer(input)
        # Flatten everything but the batch dimension for the Linear stack.
        x = paddle.reshape(x, [x.shape[0], -1])
        x = F.sigmoid(self.fc1(x))
        x = F.sigmoid(self.fc2(x))
        x = F.sigmoid(self.fc3(x))
        x = F.sigmoid(self.fc4(x))
        return self.fc5(x)

 

 

Tensor(shape=[32, 1, 64, 96], dtype=float32, place=CUDAPinnedPlace, stop_gradient=True,
[[[[5913.55175781, 5919.39160156, 5923.50341797, ..., 5953.44042969, 5950.81445312, 5946.70263672],
[5906.30273438, 5912.67578125, 5917.73779297, ..., 5956.63916016, 5953.61962891, 5949.05908203],
[5900.14111328, 5905.47851562, 5910.48437500, ..., 5958.42138672, 5955.31250000, 5950.51220703],
...,
[5510.79931641, 5510.37500000, 5509.90820312, ..., 5489.41894531, 5489.47753906, 5489.53515625],
[5509.33691406, 5509.10644531, 5508.88232422, ..., 5497.87353516, 5497.87011719, 5497.87060547],
[5508.46630859, 5508.46630859, 5508.46630859, ..., 5508.46630859, 5508.46630859, 5508.46630859]]],


[[[5856. , 5856. , 5856. , ..., 5894. , 5894. , 5896. ],
[5854. , 5854. , 5856. , ..., 5892. , 5892. , 5894. ],
[5852. , 5854. , 5854. , ..., 5888. , 5890. , 5892. ],
...,
[5600. , 5600. , 5600. , ..., 5546. , 5546. , 5544. ],
[5578. , 5578. , 5578. , ..., 5552. , 5552. , 5552. ],
[5558. , 5558. , 5558. , ..., 5558. , 5558. , 5558. ]]],


[[[5869.59082031, 5863.85986328, 5857.84570312, ..., 5936.05078125, 5934.53515625, 5930.29833984],
[5870.00585938, 5864.01464844, 5857.81738281, ..., 5936.76757812, 5935.41601562, 5931.51269531],
[5870.33984375, 5864.37841797, 5858.04980469, ..., 5937.62792969, 5936.60498047, 5932.99023438],
...,
[5186.68261719, 5187.18457031, 5187.70117188, ..., 5256.27490234, 5256.68212891, 5257.07421875],
[5197.59521484, 5197.83984375, 5198.09423828, ..., 5233.17041016, 5233.44970703, 5233.72509766],
[5214.31054688, 5214.31054688, 5214.31054688, ..., 5214.31054688, 5214.31054688, 5214.31054688]]],


...,


[[[5880.17919922, 5879.53808594, 5877.88623047, ..., 5932.43164062, 5933.09960938, 5932.67236328],
[5877.51513672, 5877.72460938, 5877.26806641, ..., 5928.21240234, 5929.49462891, 5929.83837891],
[5873.40185547, 5874.83398438, 5875.17138672, ..., 5922.30859375, 5924.29785156, 5925.17675781],
...,
[5339.62158203, 5339.36279297, 5339.07763672, ..., 5247.58496094, 5246.32714844, 5245.06250000],
[5309.01708984, 5308.94335938, 5308.91748047, ..., 5253.54687500, 5253.01855469, 5252.51269531],
[5272.57421875, 5272.57421875, 5272.57373047, ..., 5272.54541016, 5272.54541016, 5272.54541016]]],


[[[5846.64990234, 5848.92089844, 5851.43554688, ..., 5906.88574219, 5905.50390625, 5903.69726562],
[5845.58154297, 5847.34814453, 5849.47558594, ..., 5907.23730469, 5905.50927734, 5902.96435547],
[5841.83349609, 5844.13916016, 5845.75146484, ..., 5907.44775391, 5905.02343750, 5901.67919922],
...,
[5678.97851562, 5678.29101562, 5677.61718750, ..., 5659.64404297, 5660.17822266, 5660.71777344],
[5677.20019531, 5676.72167969, 5676.21582031, ..., 5671.89257812, 5671.96484375, 5672.04052734],
[5684.17089844, 5684.17089844, 5684.17089844, ..., 5684.17089844, 5684.17089844, 5684.17089844]]],


[[[5886.41552734, 5884.14013672, 5880.73486328, ..., 5879.68652344, 5880.03515625, 5880.01171875],
[5881.81250000, 5879.24853516, 5875.92480469, ..., 5883.05908203, 5883.44433594, 5883.53662109],
[5875.91064453, 5873.53027344, 5869.97558594, ..., 5885.05615234, 5885.67968750, 5885.84912109],
...,
[5135.21289062, 5136.14794922, 5137.08789062, ..., 5199.29785156, 5199.28906250, 5199.27294922],
[5125.81494141, 5126.30859375, 5126.81494141, ..., 5166.78955078, 5166.71044922, 5166.63574219],
[5133.56884766, 5133.56884766, 5133.56884766, ..., 5133.56884766, 5133.56884766, 5133.56884766]]]])
Tensor(shape=[32, 1, 64, 96], dtype=float32, place=CUDAPlace(0), stop_gradient=False,
[[[[ 1.53365946, 1.56821358, 1.59254313, ..., 1.76967919, 1.75414133, 1.72981191],
[ 1.49076736, 1.52847636, 1.55842805, ..., 1.78860593, 1.77073944, 1.74375486],
[ 1.45430923, 1.48589051, 1.51550996, ..., 1.79915130, 1.78075612, 1.75235295],
...,
[-0.84941006, -0.85192072, -0.85468274, ..., -0.97591680, -0.97557014, -0.97522920],
[-0.85806304, -0.85942668, -0.86075282, ..., -0.92589134, -0.92591161, -0.92590868],
[-0.86321437, -0.86321437, -0.86321437, ..., -0.86321437, -0.86321437, -0.86321437]]],


[[[ 1.19312811, 1.19312811, 1.19312811, ..., 1.41797256, 1.41797256, 1.42980647],
[ 1.18129420, 1.18129420, 1.19312811, ..., 1.40613866, 1.40613866, 1.41797256],
[ 1.16946030, 1.18129420, 1.18129420, ..., 1.38247085, 1.39430475, 1.40613866],
...,
[-0.32161328, -0.32161328, -0.32161328, ..., -0.64112908, -0.64112908, -0.65296298],
[-0.45178637, -0.45178637, -0.45178637, ..., -0.60562730, -0.60562730, -0.60562730],
[-0.57012552, -0.57012552, -0.57012552, ..., -0.57012552, -0.57012552, -0.57012552]]],


[[[ 1.27354443, 1.23963463, 1.20404911, ..., 1.66678536, 1.65781736, 1.63274837],
[ 1.27600026, 1.24055052, 1.20388150, ..., 1.67102659, 1.66302943, 1.63993359],
[ 1.27797639, 1.24270284, 1.20525670, ..., 1.67611718, 1.67006445, 1.64867616],
...,
[-2.76719522, -2.76422501, -2.76116848, ..., -2.35542059, -2.35301089, -2.35069084],
[-2.70262575, -2.70117831, -2.69967318, ..., -2.49212885, -2.49047613, -2.48884678],
[-2.60372186, -2.60372186, -2.60372186, ..., -2.60372186, -2.60372186, -2.60372186]]],


...,


[[[ 1.33619547, 1.33240199, 1.32262802, ..., 1.64537096, 1.64932334, 1.64679539],
[ 1.32043231, 1.32167172, 1.31897044, ..., 1.62040591, 1.62799287, 1.63002682],
[ 1.29609418, 1.30456805, 1.30656445, ..., 1.58547330, 1.59724367, 1.60244417],
...,
[-1.86226165, -1.86379290, -1.86548007, ..., -2.40683842, -2.41428089, -2.42176390],
[-2.04334712, -2.04378343, -2.04393649, ..., -2.37156200, -2.37468815, -2.37768126],
[-2.25897813, -2.25897813, -2.25898099, ..., -2.25914860, -2.25914860, -2.25914860]]],


[[[ 1.13780403, 1.15124142, 1.16612041, ..., 1.49421692, 1.48604071, 1.47535086],
[ 1.13148260, 1.14193547, 1.15452349, ..., 1.49629712, 1.48607254, 1.47101426],
[ 1.10930550, 1.12294805, 1.13248801, ..., 1.49754238, 1.48319781, 1.46341002],
...,
[ 0.14569932, 0.14163142, 0.13764441, ..., 0.03129805, 0.03445877, 0.03765127],
[ 0.13517708, 0.13234572, 0.12935257, ..., 0.10377213, 0.10419972, 0.10464754],
[ 0.17642245, 0.17642245, 0.17642245, ..., 0.17642245, 0.17642245, 0.17642245]]],


[[[ 1.37309551, 1.35963213, 1.33948326, ..., 1.33328032, 1.33534312, 1.33520448],
[ 1.34585965, 1.33068871, 1.31102240, ..., 1.35323560, 1.35551512, 1.35606122],
[ 1.31093860, 1.29685402, 1.27582109, ..., 1.36505222, 1.36874163, 1.36974418],
...,
[-3.07173944, -3.06620669, -3.06064510, ..., -2.69255137, -2.69260335, -2.69269872],
[-3.12734675, -3.12442565, -3.12142968, ..., -2.88490152, -2.88536954, -2.88581181],
[-3.08146715, -3.08146715, -3.08146715, ..., -3.08146715, -3.08146715, -3.08146715]]]])


/root/anaconda3/lib/python3.8/site-packages/paddle/nn/layer/norm.py:640: UserWarning: When training, we now always track global mean and variance.
warnings.warn(


---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
in
53 else:
54 model.train()
---> 55 trainfun(model)
56 paddle.save(model.state_dict(),paramsfile)
57

in trainfun(model)
17 eacc.append(acc)
18 eloss.append(loss)
---> 19 loss.backward()
20 optimizer.step()
21 optimizer.clear_grad()

in backward(self, grad_tensor, retain_graph)

~/anaconda3/lib/python3.8/site-packages/paddle/fluid/wrapped_decorator.py in __impl__(func, *args, **kwargs)
23 def __impl__(func, *args, **kwargs):
24 wrapped_func = decorator_func(func)
---> 25 return wrapped_func(*args, **kwargs)
26
27 return __impl__

~/anaconda3/lib/python3.8/site-packages/paddle/fluid/framework.py in __impl__(*args, **kwargs)
225 assert in_dygraph_mode(
226 ), "We only support '%s()' in dynamic graph mode, please call 'paddle.disable_static()' to enter dynamic graph mode." % func.__name__
--> 227 return func(*args, **kwargs)
228
229 return __impl__

~/anaconda3/lib/python3.8/site-packages/paddle/fluid/dygraph/varbase_patch_methods.py in backward(self, grad_tensor, retain_graph)
236 framework._dygraph_tracer())
237 else:
--> 238 core.dygraph_run_backward([self], [grad_tensor], retain_graph,
239 framework._dygraph_tracer())
240 else:

RuntimeError: (NotFound) No Output(X@GRAD) found for BatchNormGrad operator.
[Hint: Expected ctx->HasOutput(framework::GradVarName("X")) == true, but received ctx->HasOutput(framework::GradVarName("X")):0 != true:1.] (at /paddle/paddle/fluid/operators/batch_norm_op.cc:468)

0
收藏
回复
全部评论(2)
时间顺序
学大哥哥哥
#2 回复于2021-08

现在把数据归一化移到数据预处理了。就可以了。是不是bn之前必须有其他步骤。

0
回复
李长安
#3 回复于2021-08

感觉像是版本对应问题

0
回复
需求/bug反馈?一键提issue告诉我们
发现bug?如果您知道修复办法,欢迎提pr直接参与建设飞桨~
在@后输入用户全名并按空格结束,可艾特全站任一用户