AttributeError: 'Parameter' object has no attribute 'gradient_clip_attr'
[2020-06-30 23:05:54,208] [ INFO] - Load pretraining parameters from /home/aistudio/.paddlehub/modules/ernie_v2_eng_large/assets/params.
---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
in
18 num_classes=dataset.num_labels,
19 config=config,
---> 20 metrics_choices=["acc"])
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddlehub/finetune/task/classifier_task.py in __init__(self, feature, num_classes, feed_list, data_reader, startup_program, config, hidden_units, metrics_choices)
178 hidden_units=None,
179 metrics_choices="default"):
--> 180 """
181 Args:
182 num_classes: total labels of the text classification task.
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddlehub/finetune/task/classifier_task.py in __init__(self, feature, num_classes, feed_list, data_reader, startup_program, config, hidden_units, metrics_choices)
47 main_program = feature.block.program
48 super(ClassifierTask, self).__init__(
---> 49 data_reader=data_reader,
50 main_program=main_program,
51 feed_list=feed_list,
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddlehub/finetune/task/base_task.py in __init__(self, feed_list, data_reader, main_program, startup_program, config, metrics_choices)
296
297 else:
--> 298 self._base_main_program = clone_program(
299 main_program, for_test=False)
300 if startup_program is None:
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddlehub/common/paddle_helper.py in clone_program(origin_program, for_test)
272
273 def clone_program(origin_program, for_test=False):
--> 274 dest_program = fluid.Program()
275 _copy_vars_and_ops_in_blocks(origin_program.global_block(),
276 dest_program.global_block())
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddlehub/common/paddle_helper.py in _copy_vars_and_ops_in_blocks(from_block, to_block)
145
146 def _copy_vars_and_ops_in_blocks(from_block, to_block):
--> 147 for var in from_block.vars:
148 var = from_block.var(var)
149 var_info = copy.deepcopy(get_variable_info(var))
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddlehub/common/paddle_helper.py in get_variable_info(var)
63 var_info['trainable'] = var.trainable
64 var_info['optimize_attr'] = var.optimize_attr
---> 65 var_info['regularizer'] = var.regularizer
66 if not version_compare(paddle.__version__, '1.8'):
67 var_info['gradient_clip_attr'] = var.gradient_clip_attr
AttributeError: 'Parameter' object has no attribute 'gradient_clip_attr'
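Judging from the last frame, get_variable_info in paddlehub/common/paddle_helper.py reads var.gradient_clip_attr, an attribute that Parameter objects no longer expose on newer PaddlePaddle releases, so this looks like a version mismatch between the installed paddlepaddle and paddlehub packages. Aligning the two versions (an older paddlepaddle, or a newer PaddleHub) is the likely remedy. Purely as an illustration of how the failing access could be guarded locally, here is a minimal sketch; get_variable_info_safe is a hypothetical helper, not PaddleHub API, and the field list is just the one visible in the traceback:

# Minimal sketch (assumption: a local workaround, not the official PaddleHub fix).
# `var` stands for a fluid Parameter/Variable; on Paddle >= 1.8 the
# `gradient_clip_attr` attribute may be missing, so getattr with a default
# avoids the AttributeError instead of reading the attribute unconditionally.
def get_variable_info_safe(var):
    var_info = {
        'trainable': getattr(var, 'trainable', None),
        'optimize_attr': getattr(var, 'optimize_attr', None),
        'regularizer': getattr(var, 'regularizer', None),
        # Only present on older Paddle (< 1.8) Parameter objects.
        'gradient_clip_attr': getattr(var, 'gradient_clip_attr', None),
    }
    return var_info

Patching the installed paddle_helper.py this way only suppresses the crash; matching the paddlepaddle and paddlehub versions that were released together is the safer route.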