
Commit 595f6e3

Merge branch 'rasa-info-command' of github.com:RasaHQ/rasa into rasa-info-command
2 parents: 6a4531d + d39bf89

25 files changed: +272 / -710 lines

.github/workflows/vulnerability-scan.yml

Lines changed: 4 additions & 3 deletions
@@ -2,8 +2,8 @@ name: Vulnerability Scan

 on:
   schedule:
-    # Run once every day
-    - cron: '0 0 * * *'
+    # Run every third day
+    - cron: 0 0 * * */3

 jobs:
   scan:
@@ -35,10 +35,11 @@ jobs:
           echo "::set-env name=IMAGE_WITH_POETRY_LOCK::$IMAGE_NAME"

       - name: Scan image 🕵️‍♀️🕵️‍♂️
-        uses: homoluctus/gitrivy@v2.0.0
+        uses: wochinge/gitrivy@6bf026b
         with:
           # Needs the token so it can create an issue once a vulnerability was found
           token: ${{ secrets.GITHUB_TOKEN }}
           image: ${{ env.IMAGE_WITH_POETRY_LOCK }}
           ignore_unfixed: true
           issue_label: "tool:trivy,type:vulnerability"
+          fail_on_vulnerabilities: true
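
If you want to sanity-check what the new cron expression actually fires on, here is a small sketch using the third-party croniter package (not used by the workflow itself; purely illustrative):

    # Hypothetical check, assumes `pip install croniter`; not part of this repository.
    from datetime import datetime

    from croniter import croniter

    schedule = croniter("0 0 * * */3", datetime(2020, 5, 1))
    for _ in range(4):
        # In the day-of-week field, */3 matches days 0, 3 and 6 (Sun, Wed, Sat).
        print(schedule.get_next(datetime))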

changelog/5737.bugfix.rst

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
+Pinned the library version for the Azure :ref:`cloud-storage` to ``2.1.0`` since the
+persistor is currently not compatible with later versions of the ``azure-storage-blob``
+library.
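
For context, the incompatibility is plausible given that azure-storage-blob replaced its entire client API between the 2.x and 12.x lines. A rough, illustrative sketch of the difference (placeholder credentials and container names, not the persistor's actual code):

    # azure-storage-blob 2.1.0: uploads go through BlockBlobService.
    from azure.storage.blob import BlockBlobService

    service = BlockBlobService(account_name="myaccount", account_key="<key>")
    service.create_blob_from_path("models", "model.tar.gz", "model.tar.gz")

    # azure-storage-blob >= 12.0 removed BlockBlobService; the equivalent upload
    # uses BlobServiceClient instead, so code written against the 2.x API breaks:
    #
    #     from azure.storage.blob import BlobServiceClient
    #     client = BlobServiceClient.from_connection_string("<connection string>")
    #     with open("model.tar.gz", "rb") as f:
    #         client.get_container_client("models").upload_blob("model.tar.gz", f)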

changelog/5756.improvement.rst

Lines changed: 8 additions & 0 deletions
@@ -0,0 +1,8 @@
+To avoid the problem of our entity extractors predicting entity labels for just a part of the words,
+we introduced a cleaning method after the prediction was done.
+We should avoid the incorrect prediction in the first place.
+To achieve this we will not tokenize words into sub-words anymore.
+We take the mean feature vectors of the sub-words as the feature vector of the word.
+
+.. warning::
+    This change is model breaking. Please, retrain your models.
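
The averaging described above is a simple mean over the sub-word feature vectors; a minimal sketch of the idea (hypothetical function name and toy numbers, not the actual featurizer code):

    import numpy as np

    def word_vector_from_sub_words(sub_word_vectors: np.ndarray) -> np.ndarray:
        """Collapse a word's sub-word feature vectors (shape: n_sub_tokens x dim)
        into a single word-level vector by taking the element-wise mean."""
        return np.mean(sub_word_vectors, axis=0)

    # e.g. "playing" split into sub-words ["play", "##ing"] with 4-dim features:
    features = np.array([[0.2, 0.4, 0.0, 1.0],
                         [0.6, 0.0, 0.8, 1.0]])
    print(word_vector_from_sub_words(features))  # [0.4 0.2 0.4 1. ]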

poetry.lock

Lines changed: 33 additions & 76 deletions
Some generated files are not rendered by default.

pyproject.toml

Lines changed: 3 additions & 3 deletions
@@ -9,7 +9,7 @@ exclude = "((.eggs | .git | .pytype | .pytest_cache | build | dist))"

 [tool.poetry]
 name = "rasa"
-version = "1.10.0"
+version = "1.11.0a1"
 description = "Open source machine learning framework to automate text- and voice-based conversations: NLU, dialogue management, connect to Slack, Facebook, and more - Create chatbots and voice assistants"
 authors = [ "Rasa Technologies GmbH <[email protected]>",]
 maintainers = [ "Tom Bocklisch <[email protected]>",]
@@ -91,7 +91,7 @@ rocketchat_API = ">=0.6.31,<1.4.0"
 colorhash = "~1.0.2"
 pika = "~1.1.0"
 jsonschema = "~3.2"
-packaging = "~19.0"
+packaging = ">=19.0,<20.4"
 gevent = ">=1.4,<1.6"
 pytz = "^2019.1"
 rasa-sdk = "^1.10.0"
@@ -142,7 +142,7 @@ black = "^19.10b0"
 flake8 = "^3.7.9"
 pytype = "^2020.1.24"
 google-cloud-storage = "^1.25.0"
-azure-storage-blob = "^12.1.0"
+azure-storage-blob = "<=2.1.0"
 coveralls = "^2.0.0"
 towncrier = "^19.2.0"
 toml = "^0.10.0"
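
The relaxed packaging constraint now admits every release up to, but not including, 20.4 instead of only the 19.x line. A quick way to compare the two ranges with packaging's own specifier API (the versions below are just examples):

    from packaging.specifiers import SpecifierSet

    old = SpecifierSet("~=19.0")          # roughly Poetry's "~19.0"
    new = SpecifierSet(">=19.0,<20.4")

    for candidate in ["19.2", "20.3", "20.4"]:
        print(candidate, candidate in old, candidate in new)
    # 19.2 True True
    # 20.3 False True
    # 20.4 False False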

rasa/constants.py

Lines changed: 1 addition & 1 deletion
@@ -53,7 +53,7 @@
 CONFIG_MANDATORY_KEYS_NLU = ["language", "pipeline"]
 CONFIG_MANDATORY_KEYS = CONFIG_MANDATORY_KEYS_CORE + CONFIG_MANDATORY_KEYS_NLU

-MINIMUM_COMPATIBLE_VERSION = "1.10.0"
+MINIMUM_COMPATIBLE_VERSION = "1.11.0a1"

 GLOBAL_USER_CONFIG_PATH = os.path.expanduser("~/.config/rasa/global.yml")
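
Raising MINIMUM_COMPATIBLE_VERSION means models trained with earlier releases are treated as incompatible and have to be retrained. A rough sketch of how such a check typically looks (not Rasa's exact implementation):

    from packaging import version

    MINIMUM_COMPATIBLE_VERSION = "1.11.0a1"

    def is_model_compatible(trained_with: str) -> bool:
        """Return True if a model trained with the given Rasa version can still be loaded."""
        return version.parse(trained_with) >= version.parse(MINIMUM_COMPATIBLE_VERSION)

    print(is_model_compatible("1.10.0"))  # False -> retraining required
    print(is_model_compatible("1.11.0"))  # True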

rasa/nlu/classifiers/diet_classifier.py

Lines changed: 0 additions & 1 deletion
@@ -803,7 +803,6 @@ def _predict_entities(
         )

         entities = self.add_extractor_name(entities)
-        entities = self.clean_up_entities(message, entities)
         entities = message.get(ENTITIES, []) + entities

         return entities

rasa/nlu/constants.py

Lines changed: 1 addition & 0 deletions
@@ -37,6 +37,7 @@

 CLS_TOKEN = "__CLS__"
 POSITION_OF_CLS_TOKEN = -1
+NUMBER_OF_SUB_TOKENS = "number_of_sub_tokens"

 MESSAGE_ATTRIBUTES = [TEXT, INTENT, RESPONSE]

rasa/nlu/extractors/crf_entity_extractor.py

Lines changed: 0 additions & 1 deletion
@@ -195,7 +195,6 @@ def _update_crf_order(self, training_data: TrainingData):
     def process(self, message: Message, **kwargs: Any) -> None:
         entities = self.extract_entities(message)
         entities = self.add_extractor_name(entities)
-        entities = self.clean_up_entities(message, entities)
         message.set(ENTITIES, message.get(ENTITIES, []) + entities, add_to_output=True)

     def extract_entities(self, message: Message) -> List[Dict[Text, Any]]:

rasa/nlu/extractors/duckling_http_extractor.py

Lines changed: 0 additions & 1 deletion
@@ -186,7 +186,6 @@ def process(self, message: Message, **kwargs: Any) -> None:
         )

         extracted = self.add_extractor_name(extracted)
-        extracted = self.clean_up_entities(message, extracted)
         message.set(ENTITIES, message.get(ENTITIES, []) + extracted, add_to_output=True)

     @classmethod
