
Attention layers now allow None paddings, remove assert.
PiperOrigin-RevId: 685751950
lingvo-bot authored and copybara-github committed Oct 14, 2024
1 parent b821d00 commit 48e5034
Showing 1 changed file with 0 additions and 2 deletions.
lingvo/core/layers_with_attention.py: 0 additions & 2 deletions
@@ -1500,7 +1500,6 @@ def FProp(
 
     if p.has_aux_atten:
       assert aux_vecs is not None
-      assert aux_paddings is not None
       with tf.name_scope('aux_atten'):
         atten_vec, atten_prob = self.atten.FProp(theta.atten, atten_vec,
                                                  aux_paddings, aux_vecs,
@@ -1550,7 +1549,6 @@ def ExtendStep(self,
 
     if p.has_aux_atten:
       assert aux_vecs is not None
-      assert aux_paddings is not None
 
     batch_size = py_utils.GetShape(source_vecs)[0]
 
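The diff only drops the two assertions. As a quick illustration of the behavioral change, the minimal Python sketch below contrasts the guard logic before and after this commit; Params, fprop_before, and fprop_after are hypothetical stand-ins for illustration, not lingvo classes or functions. The point it shows: with has_aux_atten set, callers may now pass aux_paddings=None, and the value is simply forwarded to the attention sublayer rather than rejected up front.

# Minimal sketch of the guard change; Params, fprop_before, and fprop_after
# are illustrative stand-ins, not part of the lingvo API.
from dataclasses import dataclass
from typing import Any, Optional

@dataclass
class Params:
  has_aux_atten: bool = True

def fprop_before(p: Params, aux_vecs: Any, aux_paddings: Optional[Any]) -> None:
  # Pre-commit behavior: cross-attention required non-None aux_paddings.
  if p.has_aux_atten:
    assert aux_vecs is not None
    assert aux_paddings is not None  # this assert is removed by the commit
    # ... cross-attention would run here ...

def fprop_after(p: Params, aux_vecs: Any, aux_paddings: Optional[Any]) -> None:
  # Post-commit behavior: aux_paddings may be None and is forwarded as-is
  # to the attention sublayer, which is expected to handle the None case.
  if p.has_aux_atten:
    assert aux_vecs is not None
    # ... cross-attention would run here, receiving aux_paddings unchanged ...

if __name__ == '__main__':
  p = Params()
  vecs = object()
  fprop_after(p, vecs, None)       # accepted after this commit
  try:
    fprop_before(p, vecs, None)    # would have raised before this commit
  except AssertionError:
    print('pre-commit assert rejected aux_paddings=None')

Whether a None padding is actually handled downstream depends on the attention implementation being called; this sketch only mirrors the removed guard, not the sublayer's behavior.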
