fix attention loss for ce

pull/4718/head
tink2123 2021-11-22 12:08:33 +08:00
parent f26846ccd1
commit e9c3ff84a9
1 changed file with 0 additions and 2 deletions


@@ -45,7 +45,6 @@ class AttentionHead(nn.Layer):
         output_hiddens = []
         if targets is not None:
-            print("target is not None")
             for i in range(num_steps):
                 char_onehots = self._char_to_onehot(
                     targets[:, i], onehot_dim=self.num_classes)
@@ -55,7 +54,6 @@ class AttentionHead(nn.Layer):
             output = paddle.concat(output_hiddens, axis=1)
             probs = self.generator(output)
         else:
-            print("target is None")
             targets = paddle.zeros(shape=[batch_size], dtype="int32")
             probs = None
             char_onehots = None
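
For context, the two hunks touch the two branches of AttentionHead.forward: the teacher-forcing branch used when ground-truth targets are available during training, and the free-running branch used at inference when targets is None. The removed print calls were only logging which branch was taken. Below is a minimal, framework-agnostic sketch of that control flow; decode, step_fn, and the shapes are illustrative stand-ins, not the actual PaddleOCR API.

import numpy as np

def decode(step_fn, num_steps, num_classes, batch_size, targets=None):
    # step_fn(char_onehots) -> per-step class scores, shape [batch, num_classes];
    # it stands in for the attention cell + generator of the real head (assumption).
    outputs = []
    if targets is not None:
        # Training: teacher forcing, feed the ground-truth character at each step.
        for i in range(num_steps):
            char_onehots = np.eye(num_classes)[targets[:, i]]
            outputs.append(step_fn(char_onehots))
    else:
        # Inference: start from an all-zeros index and feed back the previous prediction.
        prev = np.zeros(batch_size, dtype="int64")
        for i in range(num_steps):
            char_onehots = np.eye(num_classes)[prev]
            scores = step_fn(char_onehots)
            outputs.append(scores)
            prev = scores.argmax(axis=1)
    return np.stack(outputs, axis=1)  # [batch, num_steps, num_classes]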