From 0adae94e85a97fb52c9564da85a64dcc18f87a8b Mon Sep 17 00:00:00 2001
From: "codewithzichao.github.io"
Date: Tue, 26 Jan 2021 17:35:58 +0800
Subject: [PATCH] The 'prelu' activation string is deprecated; using
 tf.keras.layers.PReLU() is much better.

---
 DIN/modules.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/DIN/modules.py b/DIN/modules.py
index 60a84b3..6c75542 100644
--- a/DIN/modules.py
+++ b/DIN/modules.py
@@ -17,7 +17,11 @@ def __init__(self, att_hidden_units, activation='prelu'):
         """
         """
         super(Attention_Layer, self).__init__()
-        self.att_dense = [Dense(unit, activation=activation) for unit in att_hidden_units]
+        if activation=="prelu":
+            self.att_dense = [Dense(unit, activation=tf.keras.layers.PReLU()) for unit in att_hidden_units]
+        else:
+            self.att_dense = [Dense(unit, activation=activation) for unit in att_hidden_units]
+
         self.att_final_dense = Dense(1)
 
     def call(self, inputs):
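
For context, a minimal sketch of the construction this patch introduces, assuming TensorFlow 2.x; build_att_dense is an illustrative helper, not part of DIN/modules.py, while att_hidden_units and the Dense stack follow the names in the patched Attention_Layer.__init__:

import tensorflow as tf
from tensorflow.keras.layers import Dense

def build_att_dense(att_hidden_units, activation="prelu"):
    # Mirrors the patched Attention_Layer.__init__: when "prelu" is requested,
    # each Dense gets its own tf.keras.layers.PReLU instance. A Keras layer is
    # callable, so it can be passed as `activation`, and because a fresh PReLU
    # is created per Dense, the learnable alpha is not shared between layers.
    if activation == "prelu":
        return [Dense(unit, activation=tf.keras.layers.PReLU()) for unit in att_hidden_units]
    # Any other value is passed through as an ordinary activation string/callable.
    return [Dense(unit, activation=activation) for unit in att_hidden_units]

# Quick shape check on a dummy attention input of shape (batch, seq_len, features).
x = tf.random.normal((2, 8, 16))
for layer in build_att_dense([64, 32]):
    x = layer(x)
print(x.shape)  # expected: (2, 8, 32)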