From 3468af713f068df173177c380385507a44cd0559 Mon Sep 17 00:00:00 2001
From: Qznan
Date: Fri, 13 Mar 2020 14:31:41 +0800
Subject: [PATCH] Update modules.py

fix bug: call as_list() before indexing. The unparenthesized as_list is a
bound method, so as_list[-1] raises "TypeError: 'method' object is not
subscriptable" whenever num_units is left as None.
---
 modules.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules.py b/modules.py
index a8c0f2c..6e00044 100755
--- a/modules.py
+++ b/modules.py
@@ -105,7 +105,7 @@ def multihead_attention(queries, keys, num_units=None, num_heads=8, dropout_rate
     with tf.variable_scope(scope, reuse=reuse):
         # Set the fall back option for num_units
         if num_units is None:
-            num_units = queries.get_shape().as_list[-1]
+            num_units = queries.get_shape().as_list()[-1]
 
         # Linear projections
         Q = tf.layers.dense(queries, num_units, activation=tf.nn.relu, name="dense_q") # (N, T_q, C)
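
A minimal sketch of the failure mode, assuming TensorFlow 1.x (matching the
tf.variable_scope / tf.layers.dense API used in modules.py); the placeholder
shape below is hypothetical, chosen only for illustration:

    import tensorflow as tf

    # Hypothetical input tensor standing in for `queries`: (N, T_q, C)
    queries = tf.placeholder(tf.float32, shape=[None, 10, 512])

    # Buggy: as_list without parentheses is a bound method, and methods are
    # not subscriptable, so this raises
    # TypeError: 'method' object is not subscriptable
    # num_units = queries.get_shape().as_list[-1]

    # Fixed: call as_list() to get the static shape [None, 10, 512] as a
    # Python list, then index the last (channel) dimension.
    num_units = queries.get_shape().as_list()[-1]  # 512

Because num_units defaults to None in multihead_attention, the guarded line
executes on every call that relies on the default, so the unfixed code fails
on first use.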