Skip to content

Commit 7fffa52

Browse files
committed
reapplying changes
1 parent eda7535 commit 7fffa52

File tree

5 files changed

+9
-12
lines changed

5 files changed

+9
-12
lines changed

CHANGELOG.rst

Lines changed: 0 additions & 4 deletions
Original file line number · Diff line number · Diff line change
@@ -36,10 +36,6 @@ Bugfixes
3636
Therefore, all 1.9 models `except for 1.9.6` will be compatible; a model trained on 1.9.6 will need
3737
to be retrained on 1.9.7.
3838

39-
Miscellaneous internal changes
40-
------------------------------
41-
- #5631
42-
4339

4440
[1.9.6] - 2020-04-15
4541
^^^^^^^^^^^^^^^^^^^^

changelog/5626.msic.rst

Lines changed: 0 additions & 1 deletion
This file was deleted.

changelog/5631.misc.rst

Lines changed: 1 addition & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -0,0 +1 @@
1+
Set regularization constant in ``EmbeddingIntentClassifier`` to 0.001.

rasa/nlu/classifiers/diet_classifier.py

Lines changed: 7 additions & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -1255,7 +1255,7 @@ def _create_sequence(
12551255
seq_ids = None
12561256

12571257
inputs = self._combine_sparse_dense_features(
1258-
features, mask, name, sparse_dropout, dense_dropout,
1258+
features, mask, name, sparse_dropout, dense_dropout
12591259
)
12601260

12611261
inputs = self._tf_layers[f"ffnn.{name}"](inputs, self._training)
@@ -1272,6 +1272,10 @@ def _create_sequence(
12721272
transformer_inputs, 1 - mask, self._training
12731273
)
12741274

1275+
if self.config[NUM_TRANSFORMER_LAYERS] > 0:
1276+
# apply activation
1277+
outputs = tfa.activations.gelu(outputs)
1278+
12751279
return outputs, inputs, seq_ids, lm_mask_bool
12761280

12771281
def _create_all_labels(self) -> Tuple[tf.Tensor, tf.Tensor]:
@@ -1283,7 +1287,7 @@ def _create_all_labels(self) -> Tuple[tf.Tensor, tf.Tensor]:
12831287
mask_label = self._compute_mask(label_lengths)
12841288

12851289
x = self._create_bow(
1286-
self.tf_label_data[LABEL_FEATURES], mask_label, self.label_name,
1290+
self.tf_label_data[LABEL_FEATURES], mask_label, self.label_name
12871291
)
12881292
all_labels_embed = self._tf_layers[f"embed.{LABEL}"](x)
12891293

@@ -1432,7 +1436,7 @@ def batch_loss(
14321436

14331437
label_ids = tf_batch_data[LABEL_IDS][0]
14341438
label = self._create_bow(
1435-
tf_batch_data[LABEL_FEATURES], mask_label, self.label_name,
1439+
tf_batch_data[LABEL_FEATURES], mask_label, self.label_name
14361440
)
14371441
loss, acc = self._calculate_label_loss(cls, label, label_ids)
14381442
self.intent_loss.update_state(loss)

rasa/utils/tensorflow/transformer.py

Lines changed: 1 addition & 4 deletions
Original file line number · Diff line number · Diff line change
@@ -626,7 +626,4 @@ def call(
626626
# if normalization is done in encoding layers, then it should also be done
627627
# on the output, since the output can grow very large, being the sum of
628628
# a whole stack of unnormalized layer outputs.
629-
normalized_x = self._layer_norm(x) # (batch_size, length, units)
630-
631-
# apply final activation
632-
return tfa.activations.gelu(normalized_x)
629+
return self._layer_norm(x) # (batch_size, length, units)

0 commit comments

Comments (0)