Fix analytics tests (cvat-ai#6231)
azhavoro authored Jun 4, 2023
1 parent 520d219 commit 75acfc5
Showing 1 changed file with 47 additions and 17 deletions.
tests/python/rest_api/test_analytics.py (47 additions & 17 deletions)
@@ -101,20 +101,34 @@ def setup(self, restore_clickhouse_db_per_function):
 
         self.task_ids = [t[0] for t in task_ids]
 
-        expected_request_ids = [project_request_id, *[t[1] for t in task_ids]]
-
-        assert all(req_id is not None for req_id in expected_request_ids)
-
-        self._wait_for_request_ids(expected_request_ids)
+        assert project_request_id is not None
+        assert all(t[1] is not None for t in task_ids)
+
+        event_filters = [
+            (
+                (lambda e: json.loads(e["payload"])["request"]["id"], [project_request_id]),
+                ("scope", ["create:project"]),
+            ),
+        ]
+        for task_id in task_ids:
+            event_filters.extend(
+                (
+                    (
+                        (lambda e: json.loads(e["payload"])["request"]["id"], [task_id[1]]),
+                        ("scope", ["create:task"]),
+                    ),
+                    (("scope", ["create:job"]),),
+                )
+            )
+        self._wait_for_request_ids(event_filters)
 
-    def _wait_for_request_ids(self, expected_request_ids):
+    def _wait_for_request_ids(self, event_filters):
         MAX_RETRIES = 5
         SLEEP_INTERVAL = 2
         while MAX_RETRIES > 0:
             data = self._test_get_audit_logs_as_csv()
             events = self._csv_to_dict(data)
-            request_ids = set(json.loads(e["payload"])["request"]["id"] for e in events)
-            if all(req_id in request_ids for req_id in expected_request_ids):
+            if all(self._filter_events(events, filter) for filter in event_filters):
                 break
             MAX_RETRIES -= 1
             sleep(SLEEP_INTERVAL)
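
For context: `_wait_for_request_ids` now polls until every filter group matches at least one audit-log event, instead of only checking that request IDs appear somewhere in the log. Each group is a sequence of (getter, expected_values) pairs that must all hold for a single event. A sketch of what `event_filters` expands to for one project and one task; the request IDs "p-1" and "t-1" are made up for illustration:

import json

# Hypothetical filter groups, assuming request IDs "p-1" (project) and "t-1" (task).
event_filters = [
    # a create:project event carrying the project's request ID
    (
        (lambda e: json.loads(e["payload"])["request"]["id"], ["p-1"]),
        ("scope", ["create:project"]),
    ),
    # a create:task event carrying the task's request ID
    (
        (lambda e: json.loads(e["payload"])["request"]["id"], ["t-1"]),
        ("scope", ["create:task"]),
    ),
    # at least one create:job event, with no request-ID constraint
    (("scope", ["create:job"]),),
]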
@@ -155,13 +169,12 @@ def _csv_to_dict(csv_data):
         return res
 
     @staticmethod
-    def _filter_events(events, filter_):
+    def _filter_events(events, filters):
         res = []
-        for event in events:
-            if all(
-                (event[filter_key] == filter_value for filter_key, filter_value in filter_.items())
-            ):
-                res.append(event)
+        get_value = lambda getter, e: getter(e) if callable(getter) else e.get(getter, None)
+        for e in events:
+            if all(get_value(getter, e) in expected_values for getter, expected_values in filters):
+                res.append(e)
 
         return res
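
The rewritten `_filter_events` treats a getter as either a CSV column name (plain dict lookup) or a callable that derives a value, and keeps an event only if every getter's value is among the expected values. A minimal, self-contained sketch of that matching logic against made-up events:

import json

def filter_events(events, filters):
    # Mirrors the matching logic above: a getter is either a callable or a column name.
    get_value = lambda getter, e: getter(e) if callable(getter) else e.get(getter, None)
    return [
        e
        for e in events
        if all(get_value(getter, e) in expected for getter, expected in filters)
    ]

events = [
    {"scope": "create:project", "payload": json.dumps({"request": {"id": "req-1"}})},
    {"scope": "create:job", "payload": json.dumps({"request": {"id": "req-2"}})},
]

# Keep events whose request ID is "req-1" AND whose scope is "create:project".
matched = filter_events(
    events,
    [
        (lambda e: json.loads(e["payload"])["request"]["id"], ["req-1"]),
        ("scope", ["create:project"]),
    ],
)
assert len(matched) == 1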

@@ -195,7 +208,7 @@ def test_filter_by_project(self):
         data = self._test_get_audit_logs_as_csv(**query_params)
         events = self._csv_to_dict(data)
 
-        filtered_events = self._filter_events(events, {"project_id": str(self.project_id)})
+        filtered_events = self._filter_events(events, [("project_id", [str(self.project_id)])])
         assert len(filtered_events)
         assert len(events) == len(filtered_events)
 
@@ -213,7 +226,7 @@ def test_filter_by_task(self):
         data = self._test_get_audit_logs_as_csv(**query_params)
         events = self._csv_to_dict(data)
 
-        filtered_events = self._filter_events(events, {"task_id": str(task_id)})
+        filtered_events = self._filter_events(events, [("task_id", [str(task_id)])])
         assert len(filtered_events)
         assert len(events) == len(filtered_events)
 
@@ -251,7 +264,24 @@ def test_delete_project(self):
         response = delete_method("admin1", f"projects/{self.project_id}")
         assert response.status_code == HTTPStatus.NO_CONTENT
 
-        self._wait_for_request_ids([response.headers.get("X-Request-Id")])
+        event_filters = (
+            (
+                (
+                    lambda e: json.loads(e["payload"])["request"]["id"],
+                    [response.headers.get("X-Request-Id")],
+                ),
+                ("scope", ["delete:project"]),
+            ),
+            (
+                (
+                    lambda e: json.loads(e["payload"])["request"]["id"],
+                    [response.headers.get("X-Request-Id")],
+                ),
+                ("scope", ["delete:task"]),
+            ),
+        )
+
+        self._wait_for_request_ids(event_filters)
 
         query_params = {
             "project_id": self.project_id,
@@ -260,7 +290,7 @@ def test_delete_project(self):
         data = self._test_get_audit_logs_as_csv(**query_params)
         events = self._csv_to_dict(data)
 
-        filtered_events = self._filter_events(events, {"project_id": str(self.project_id)})
+        filtered_events = self._filter_events(events, [("project_id", [str(self.project_id)])])
         assert len(filtered_events)
         assert len(events) == len(filtered_events)