Skip to content

Commit

Permalink
chore: Update dataset_id & dataset_type to datasource_id & datasource_type for SPA explore (apache#22461)
Browse files Browse the repository at this point in the history
  • Loading branch information
hughhhh authored Dec 23, 2022
1 parent aa55800 commit 9b26794
Show file tree
Hide file tree
Showing 13 changed files with 65 additions and 63 deletions.
2 changes: 1 addition & 1 deletion superset-frontend/src/SqlLab/fixtures.ts
Original file line number Diff line number Diff line change
Expand Up @@ -722,7 +722,7 @@ export const mockdatasets = [...new Array(3)].map((_, i) => ({
changed_by: 'user',
changed_on: new Date().toISOString(),
database_name: `db ${i}`,
explore_url: `/explore/?dataset_type=table&dataset_id=${i}`,
explore_url: `/explore/?datasource_type=table&datasource_id=${i}`,
id: i,
schema: `schema ${i}`,
table_name: `coolest table ${i}`,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ test('get datasource and viz type from form_data search param - url when creatin
`${EXPLORE_BASE_URL}?form_data=%7B%22viz_type%22%3A%22big_number%22%2C%22datasource%22%3A%222__table%22%7D`,
);
expect(getParsedExploreURLParams().toString()).toEqual(
'viz_type=big_number&dataset_id=2&dataset_type=table',
'viz_type=big_number&datasource_id=2&datasource_type=table',
);
});

Expand All @@ -58,5 +58,5 @@ test('get permalink key from path params', () => {

test('get dataset id from path params', () => {
setupLocation(`${EXPLORE_BASE_URL}table/42/`);
expect(getParsedExploreURLParams().toString()).toEqual('dataset_id=42');
expect(getParsedExploreURLParams().toString()).toEqual('datasource_id=42');
});
Original file line number Diff line number Diff line change
Expand Up @@ -29,10 +29,10 @@ const EXPLORE_URL_SEARCH_PARAMS = {
parser: (formData: string) => {
const formDataObject = JSON.parse(formData);
if (formDataObject.datasource) {
const [dataset_id, dataset_type] =
const [datasource_id, datasource_type] =
formDataObject.datasource.split('__');
formDataObject.dataset_id = dataset_id;
formDataObject.dataset_type = dataset_type;
formDataObject.datasource_id = datasource_id;
formDataObject.datasource_type = datasource_type;
delete formDataObject.datasource;
}
return formDataObject;
Expand All @@ -41,17 +41,17 @@ const EXPLORE_URL_SEARCH_PARAMS = {
slice_id: {
name: 'slice_id',
},
dataset_id: {
name: 'dataset_id',
datasource_id: {
name: 'datasource_id',
},
dataset_type: {
name: 'dataset_type',
datasource_type: {
name: 'datasource_type',
},
datasource: {
name: 'datasource',
parser: (datasource: string) => {
const [dataset_id, dataset_type] = datasource.split('__');
return { dataset_id, dataset_type };
const [datasource_id, datasource_type] = datasource.split('__');
return { datasource_id, datasource_type };
},
},
form_data_key: {
Expand All @@ -70,7 +70,7 @@ const EXPLORE_URL_SEARCH_PARAMS = {

const EXPLORE_URL_PATH_PARAMS = {
p: 'permalink_key', // permalink
table: 'dataset_id',
table: 'datasource_id',
};

// search params can be placed in form_data object
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ const mockdatasets = [...new Array(3)].map((_, i) => ({
changed_by: 'user',
changed_on: new Date().toISOString(),
database_name: `db ${i}`,
explore_url: `/explore/?dataset_type=table&dataset_id=${i}`,
explore_url: `/explore/?datasource_type=table&datasource_id=${i}`,
id: i,
schema: `schema ${i}`,
table_name: `coolest table ${i}`,
Expand Down
2 changes: 1 addition & 1 deletion superset/connectors/base/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,7 @@ def url(self) -> str:
def explore_url(self) -> str:
if self.default_endpoint:
return self.default_endpoint
return f"/explore/?dataset_type={self.type}&dataset_id={self.id}"
return f"/explore/?datasource_type={self.type}&datasource_id={self.id}"

@property
def column_formats(self) -> Dict[str, Optional[str]]:
Expand Down
2 changes: 1 addition & 1 deletion superset/connectors/sqla/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -536,7 +536,7 @@ def edit(self, pk: str) -> FlaskResponse:
resp = super().edit(pk)
if isinstance(resp, str):
return resp
return redirect("/explore/?dataset_type=table&dataset_id={}".format(pk))
return redirect("/explore/?datasource_type=table&datasource_id={}".format(pk))

@expose("/list/")
@has_access
Expand Down
12 changes: 6 additions & 6 deletions superset/explore/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,11 +82,11 @@ def get(self) -> Response:
- in: query
schema:
type: integer
name: dataset_id
name: datasource_id
- in: query
schema:
type: string
name: dataset_type
name: datasource_type
responses:
200:
description: Returns the initial context.
Expand All @@ -110,8 +110,8 @@ def get(self) -> Response:
actor=g.user,
permalink_key=request.args.get("permalink_key", type=str),
form_data_key=request.args.get("form_data_key", type=str),
dataset_id=request.args.get("dataset_id", type=int),
dataset_type=request.args.get("dataset_type", type=str),
datasource_id=request.args.get("datasource_id", type=int),
datasource_type=request.args.get("datasource_type", type=str),
slice_id=request.args.get("slice_id", type=int),
)
result = GetExploreCommand(params).run()
Expand All @@ -124,8 +124,8 @@ def get(self) -> Response:
return self.response(
403,
message=ex.message,
dataset_id=ex.dataset_id,
dataset_type=ex.dataset_type,
datasource_id=ex.datasource_id,
datasource_type=ex.datasource_type,
)
except (ChartNotFoundError, ExplorePermalinkGetFailedError) as ex:
return self.response(404, message=str(ex))
Expand Down
56 changes: 29 additions & 27 deletions superset/explore/commands/get.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,8 +55,8 @@ def __init__(
) -> None:
self._permalink_key = params.permalink_key
self._form_data_key = params.form_data_key
self._dataset_id = params.dataset_id
self._dataset_type = params.dataset_type
self._datasource_id = params.datasource_id
self._datasource_type = params.datasource_type
self._slice_id = params.slice_id

# pylint: disable=too-many-locals,too-many-branches,too-many-statements
Expand Down Expand Up @@ -87,10 +87,10 @@ def run(self) -> Optional[Dict[str, Any]]:
message = _(
"Form data not found in cache, reverting to chart metadata."
)
elif self._dataset_id:
elif self._datasource_id:
initial_form_data[
"datasource"
] = f"{self._dataset_id}__{self._dataset_type}"
] = f"{self._datasource_id}__{self._datasource_type}"
if self._form_data_key:
message = _(
"Form data not found in cache, reverting to dataset metadata."
Expand All @@ -102,41 +102,43 @@ def run(self) -> Optional[Dict[str, Any]]:
initial_form_data=initial_form_data,
)
try:
self._dataset_id, self._dataset_type = get_datasource_info(
self._dataset_id, self._dataset_type, form_data
self._datasource_id, self._datasource_type = get_datasource_info(
self._datasource_id, self._datasource_type, form_data
)
except SupersetException:
self._dataset_id = None
self._datasource_id = None
# fall back unknown datasource to table type
self._dataset_type = SqlaTable.type
self._datasource_type = SqlaTable.type

dataset: Optional[BaseDatasource] = None
if self._dataset_id is not None:
datasource: Optional[BaseDatasource] = None
if self._datasource_id is not None:
try:
dataset = DatasourceDAO.get_datasource(
db.session, cast(str, self._dataset_type), self._dataset_id
datasource = DatasourceDAO.get_datasource(
db.session, cast(str, self._datasource_type), self._datasource_id
)
except DatasourceNotFound:
pass
dataset_name = dataset.name if dataset else _("[Missing Dataset]")
datasource_name = datasource.name if datasource else _("[Missing Dataset]")

if dataset:
if datasource:
if current_app.config["ENABLE_ACCESS_REQUEST"] and (
not security_manager.can_access_datasource(dataset)
not security_manager.can_access_datasource(datasource)
):
message = __(security_manager.get_datasource_access_error_msg(dataset))
message = __(
security_manager.get_datasource_access_error_msg(datasource)
)
raise DatasetAccessDeniedError(
message=message,
dataset_type=self._dataset_type,
dataset_id=self._dataset_id,
datasource_type=self._datasource_type,
datasource_id=self._datasource_id,
)

viz_type = form_data.get("viz_type")
if not viz_type and dataset and dataset.default_endpoint:
raise WrongEndpointError(redirect=dataset.default_endpoint)
if not viz_type and datasource and datasource.default_endpoint:
raise WrongEndpointError(redirect=datasource.default_endpoint)

form_data["datasource"] = (
str(self._dataset_id) + "__" + cast(str, self._dataset_type)
str(self._datasource_id) + "__" + cast(str, self._datasource_type)
)

# On explore, merge legacy/extra filters and URL params into the form data
Expand All @@ -145,16 +147,16 @@ def run(self) -> Optional[Dict[str, Any]]:
utils.merge_request_params(form_data, request.args)

# TODO: this is a dummy placeholder - should be refactored to being just `None`
dataset_data: Dict[str, Any] = {
"type": self._dataset_type,
"name": dataset_name,
datasource_data: Dict[str, Any] = {
"type": self._datasource_type,
"name": datasource_name,
"columns": [],
"metrics": [],
"database": {"id": 0, "backend": ""},
}
try:
if dataset:
dataset_data = dataset.data
if datasource:
datasource_data = datasource.data
except SupersetException as ex:
message = ex.message
except SQLAlchemyError:
Expand All @@ -178,7 +180,7 @@ def run(self) -> Optional[Dict[str, Any]]:
metadata["changed_by"] = slc.changed_by.get_full_name()

return {
"dataset": sanitize_datasource_data(dataset_data),
"dataset": sanitize_datasource_data(datasource_data),
"form_data": form_data,
"slice": slc.data if slc else None,
"message": message,
Expand Down
4 changes: 2 additions & 2 deletions superset/explore/commands/parameters.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,6 @@ class CommandParameters:
actor: User
permalink_key: Optional[str]
form_data_key: Optional[str]
dataset_id: Optional[int]
dataset_type: Optional[str]
datasource_id: Optional[int]
datasource_type: Optional[str]
slice_id: Optional[int]
6 changes: 3 additions & 3 deletions superset/explore/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,11 +27,11 @@

class DatasetAccessDeniedError(ForbiddenError):
def __init__(
self, message: str, dataset_id: Optional[int], dataset_type: Optional[str]
self, message: str, datasource_id: Optional[int], datasource_type: Optional[str]
) -> None:
self.message = message
self.dataset_id = dataset_id
self.dataset_type = dataset_type
self.datasource_id = datasource_id
self.datasource_type = datasource_type
super().__init__(self.message)


Expand Down
2 changes: 1 addition & 1 deletion tests/integration_tests/core_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -1419,7 +1419,7 @@ def test_feature_flag_serialization(self):
"/superset/welcome",
f"/superset/dashboard/{dash_id}/",
"/superset/profile/admin/",
f"/explore/?dataset_type=table&dataset_id={tbl_id}",
f"/explore/?datasource_type=table&datasource_id={tbl_id}",
]
for url in urls:
data = self.get_resp(url)
Expand Down
14 changes: 7 additions & 7 deletions tests/integration_tests/explore/api_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,7 @@ def test_no_params_provided(test_client, login_as_admin):

def test_get_from_cache(test_client, login_as_admin, dataset):
resp = test_client.get(
f"api/v1/explore/?form_data_key={FORM_DATA_KEY}&dataset_id={dataset.id}&dataset_type={dataset.type}"
f"api/v1/explore/?form_data_key={FORM_DATA_KEY}&datasource_id={dataset.id}&datasource_type={dataset.type}"
)
assert resp.status_code == 200
data = json.loads(resp.data.decode("utf-8"))
Expand Down Expand Up @@ -146,7 +146,7 @@ def test_get_from_cache_unknown_key_chart_id(
def test_get_from_cache_unknown_key_dataset(test_client, login_as_admin, dataset):
unknown_key = "unknown_key"
resp = test_client.get(
f"api/v1/explore/?form_data_key={unknown_key}&dataset_id={dataset.id}&dataset_type={dataset.type}"
f"api/v1/explore/?form_data_key={unknown_key}&datasource_id={dataset.id}&datasource_type={dataset.type}"
)
assert resp.status_code == 200
data = json.loads(resp.data.decode("utf-8"))
Expand Down Expand Up @@ -204,15 +204,15 @@ def test_get_dataset_access_denied(
):
message = "Dataset access denied"
mock_can_access_datasource.side_effect = DatasetAccessDeniedError(
message=message, dataset_id=dataset.id, dataset_type=dataset.type
message=message, datasource_id=dataset.id, datasource_type=dataset.type
)
resp = test_client.get(
f"api/v1/explore/?form_data_key={FORM_DATA_KEY}&dataset_id={dataset.id}&dataset_type={dataset.type}"
f"api/v1/explore/?form_data_key={FORM_DATA_KEY}&datasource_id={dataset.id}&datasource_type={dataset.type}"
)
data = json.loads(resp.data.decode("utf-8"))
assert resp.status_code == 403
assert data["dataset_id"] == dataset.id
assert data["dataset_type"] == dataset.type
assert data["datasource_id"] == dataset.id
assert data["datasource_type"] == dataset.type
assert data["message"] == message


Expand All @@ -221,7 +221,7 @@ def test_wrong_endpoint(mock_get_datasource, test_client, login_as_admin, datase
dataset.default_endpoint = "another_endpoint"
mock_get_datasource.return_value = dataset
resp = test_client.get(
f"api/v1/explore/?dataset_id={dataset.id}&dataset_type={dataset.type}"
f"api/v1/explore/?datasource_id={dataset.id}&datasource_type={dataset.type}"
)
data = json.loads(resp.data.decode("utf-8"))
assert resp.status_code == 302
Expand Down
2 changes: 1 addition & 1 deletion tests/unit_tests/explore/api_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ def test_explore_datasource_not_found(client: Any, full_api_access: None) -> Non
# validating the payload for a dataset that doesn't exist
# user should be expecting missing_datasource view
response = client.get(
"/api/v1/explore/?dataset_id=50000&dataset_type=table",
"/api/v1/explore/?datasource_id=50000&datasource_type=table",
)
response.json["result"]["dataset"]["name"] == "[Missing Dataset]"
assert response.status_code == 200

0 comments on commit 9b26794

Please sign in to comment.