Skip to content

Commit 11121e0

Browse files
committed
fix bug: add missing alayer_norm and vlayer_norm initializations
1 parent 4581503 commit 11121e0

File tree

1 file changed

+4
-0
lines changed

1 file changed

+4
-0
lines changed

new_models.py

Lines changed: 4 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -74,6 +74,10 @@ def __init__(self, config):
7474
self.fusion.add_module('fusion_layer_3',
7575
nn.Linear(in_features=6 * self.config.hidden_size, out_features=output_size, bias = False))
7676

77+
self.tlayer_norm = nn.LayerNorm((hidden_sizes[0] * 2,))
78+
self.vlayer_norm = nn.LayerNorm((hidden_sizes[1] * 2,))
79+
self.alayer_norm = nn.LayerNorm((hidden_sizes[2] * 2,))
80+
7781
self.MLP_Communicator1 = MLP_Communicator(self.config.hidden_size, 2, hidden_size=64, depth=1)
7882
self.MLP_Communicator2 = MLP_Communicator(self.config.hidden_size, 2, hidden_size=64, depth=1)
7983

0 commit comments

Comments
 (0)