How do I fix an OSError raised during prediction?
---------------------------------------------------------------------------
OSError                                   Traceback (most recent call last)
in
----> 1 preds = predict(model, test_loader, test_ds, label_vocab)
      2 file_path = "ernie_results.txt"
      3 with open(file_path, "w", encoding="utf8") as fout:
      4     fout.write("\n".join(preds))
      5 # Print some examples

in predict(model, data_loader, ds, label_vocab)

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/base.py in _decorate_function(func, *args, **kwargs)
    329     def _decorate_function(func, *args, **kwargs):
    330         with self:
--> 331             return func(*args, **kwargs)
    332
    333     @decorator.decorator

~/utils.py in predict(model, data_loader, ds, label_vocab)
     29     all_lens = []
     30     for input_ids, seg_ids, lens, labels in data_loader:
---> 31         logits = model(input_ids, seg_ids)
     32         preds = paddle.argmax(logits, axis=-1)
     33         # Drop CLS prediction

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py in __call__(self, *inputs, **kwargs)
    900             self._built = True
    901
--> 902         outputs = self.forward(*inputs, **kwargs)
    903
    904         for forward_post_hook in self._forward_post_hooks.values():

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddlenlp/transformers/ernie/modeling.py in forward(self, input_ids, token_type_ids, position_ids, attention_mask)
    624             token_type_ids=token_type_ids,
    625             position_ids=position_ids,
--> 626             attention_mask=attention_mask)
    627
    628         sequence_output = self.dropout(sequence_output)

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py in __call__(self, *inputs, **kwargs)
    900             self._built = True
    901
--> 902         outputs = self.forward(*inputs, **kwargs)
    903
    904         for forward_post_hook in self._forward_post_hooks.values():

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddlenlp/transformers/ernie/modeling.py in forward(self, input_ids, token_type_ids, position_ids, attention_mask)
    353             position_ids=position_ids,
    354             token_type_ids=token_type_ids)
--> 355         encoder_outputs = self.encoder(embedding_output, attention_mask)
    356         sequence_output = encoder_outputs
    357         pooled_output = self.pooler(sequence_output)

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py in __call__(self, *inputs, **kwargs)
    900             self._built = True
    901
--> 902         outputs = self.forward(*inputs, **kwargs)
    903
    904         for forward_post_hook in self._forward_post_hooks.values():

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/nn/layer/transformer.py in forward(self, src, src_mask, cache)
    681         for i, mod in enumerate(self.layers):
    682             if cache is None:
--> 683                 output = mod(output, src_mask=src_mask)
    684             else:
    685                 output, new_cache = mod(output,

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py in __call__(self, *inputs, **kwargs)
    900             self._built = True
    901
--> 902         outputs = self.forward(*inputs, **kwargs)
    903
    904         for forward_post_hook in self._forward_post_hooks.values():

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/nn/layer/transformer.py in forward(self, src, src_mask, cache)
    565         # Add cache for encoder for the usage like UniLM
    566         if cache is None:
--> 567             src = self.self_attn(src, src, src, src_mask)
    568         else:
    569             src, incremental_cache = self.self_attn(src, src, src, src_mask,

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py in __call__(self, *inputs, **kwargs)
    900             self._built = True
    901
--> 902         outputs = self.forward(*inputs, **kwargs)
    903
    904         for forward_post_hook in self._forward_post_hooks.values():

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/nn/layer/transformer.py in forward(self, query, key, value, attn_mask, cache)
    404             attn_mask = _convert_attention_mask(attn_mask, product.dtype)
    405             product = product + attn_mask
--> 406         weights = F.softmax(product)
    407         if self.dropout:
    408             weights = F.dropout(

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/nn/functional/activation.py in softmax(x, axis, dtype, name)
    874         outs_cast = x if dtype is None \
    875             else core.ops.cast(x, 'in_dtype', x.dtype, 'out_dtype', dtype)
--> 876         return core.ops.softmax(outs_cast, 'axis', axis, 'use_cudnn', use_cudnn)
    877
    878     if dtype is None:

OSError: (External) Cudnn error, CUDNN_STATUS_NOT_SUPPORTED (at /paddle/paddle/fluid/platform/cudnn_helper.h:231)
  [operator < softmax > error]
Is it a label type error?
The error is gone now. It was a very subtle mistake in how paddlenlp was being used.
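
For anyone who lands on the same trace: the exception surfaces inside the cuDNN softmax kernel rather than in user code, so one way to localize this kind of subtle usage error is to replay the loader batches outside predict() and see which batch fails. The sketch below is only a diagnostic aid under that assumption, not the poster's actual fix; debug_batches is a hypothetical helper, while model, test_loader, input_ids and seg_ids are the names already used in the question.

# Minimal diagnostic sketch (assumptions, not the confirmed fix): a
# CUDNN_STATUS_NOT_SUPPORTED from the softmax op often points at a malformed
# batch reaching the encoder (for example an empty or over-long sequence, or
# inputs built with a mismatched tokenizer/collate function). Replaying the
# loader and printing each batch's shape before the forward call narrows the
# problem down to a single batch.
import paddle

@paddle.no_grad()
def debug_batches(model, data_loader):  # hypothetical helper name
    model.eval()
    for step, (input_ids, seg_ids, lens, labels) in enumerate(data_loader):
        # For ERNIE, input_ids/seg_ids are expected to be int64 tensors of
        # shape [batch_size, seq_len].
        print(step, input_ids.shape, input_ids.dtype, seg_ids.shape)
        try:
            model(input_ids, seg_ids)
        except OSError:
            print("Failing batch index:", step)
            raise

debug_batches(model, test_loader)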