diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/fixtures.ts b/superset-frontend/packages/superset-ui-chart-controls/src/fixtures.ts
index 1e5152b2bd904..71c4dd31189ba 100644
--- a/superset-frontend/packages/superset-ui-chart-controls/src/fixtures.ts
+++ b/superset-frontend/packages/superset-ui-chart-controls/src/fixtures.ts
@@ -20,7 +20,7 @@ import { DatasourceType } from '@superset-ui/core';
 import { Dataset } from './types';
 
 export const TestDataset: Dataset = {
-  column_formats: {},
+  column_format: {},
   columns: [
     {
       advanced_data_type: undefined,
diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/types.ts b/superset-frontend/packages/superset-ui-chart-controls/src/types.ts
index c26f53b6a2097..67582523bc312 100644
--- a/superset-frontend/packages/superset-ui-chart-controls/src/types.ts
+++ b/superset-frontend/packages/superset-ui-chart-controls/src/types.ts
@@ -67,7 +67,7 @@ export interface Dataset {
   type: DatasourceType;
   columns: ColumnMeta[];
   metrics: Metric[];
-  column_formats: Record<string, string>;
+  column_format: Record<string, string>;
   verbose_map: Record<string, string>;
   main_dttm_col: string;
   // eg. ['["ds", true]', 'ds [asc]']
diff --git a/superset-frontend/packages/superset-ui-chart-controls/test/utils/columnChoices.test.tsx b/superset-frontend/packages/superset-ui-chart-controls/test/utils/columnChoices.test.tsx
index e27fa95120848..59f4796a44d21 100644
--- a/superset-frontend/packages/superset-ui-chart-controls/test/utils/columnChoices.test.tsx
+++ b/superset-frontend/packages/superset-ui-chart-controls/test/utils/columnChoices.test.tsx
@@ -42,7 +42,7 @@ describe('columnChoices()', () => {
          },
        ],
        verbose_map: {},
-       column_formats: { fiz: 'NUMERIC', about: 'STRING', foo: 'DATE' },
+       column_format: { fiz: 'NUMERIC', about: 'STRING', foo: 'DATE' },
        datasource_name: 'my_datasource',
        description: 'this is my datasource',
      }),
diff --git a/superset-frontend/packages/superset-ui-chart-controls/test/utils/defineSavedMetrics.test.tsx b/superset-frontend/packages/superset-ui-chart-controls/test/utils/defineSavedMetrics.test.tsx
index 765412d592c24..48b000ed17ffa 100644
--- a/superset-frontend/packages/superset-ui-chart-controls/test/utils/defineSavedMetrics.test.tsx
+++ b/superset-frontend/packages/superset-ui-chart-controls/test/utils/defineSavedMetrics.test.tsx
@@ -39,7 +39,7 @@ describe('defineSavedMetrics', () => {
     time_grain_sqla: 'P1D',
     columns: [],
     verbose_map: {},
-    column_formats: {},
+    column_format: {},
     datasource_name: 'my_datasource',
     description: 'this is my datasource',
   };
diff --git a/superset-frontend/src/components/Datasource/ChangeDatasourceModal.test.jsx b/superset-frontend/src/components/Datasource/ChangeDatasourceModal.test.jsx
index 5dbc52c38a5ee..419af6e3cf752 100644
--- a/superset-frontend/src/components/Datasource/ChangeDatasourceModal.test.jsx
+++ b/superset-frontend/src/components/Datasource/ChangeDatasourceModal.test.jsx
@@ -49,7 +49,7 @@ const datasourceData = {
 
 const DATASOURCES_ENDPOINT =
   'glob:*/api/v1/dataset/?q=(order_column:changed_on_delta_humanized,order_direction:desc,page:0,page_size:25)';
-const DATASOURCE_ENDPOINT = `glob:*/api/v1/dataset/${datasourceData.id}`;
+const DATASOURCE_ENDPOINT = `glob:*/datasource/get/${datasourceData.type}/${datasourceData.id}`;
 const DATASOURCE_PAYLOAD = { new: 'data' };
 
 const INFO_ENDPOINT = 'glob:*/api/v1/dataset/_info?*';
@@ -112,6 +112,6 @@ describe('ChangeDatasourceModal', () => {
     });
     await waitForComponentToPaint(wrapper);
 
-    expect(fetchMock.calls(/api\/v1\/dataset\/7/)).toHaveLength(1);
+    expect(fetchMock.calls(/datasource\/get\/table\/7/)).toHaveLength(1);
   });
 });
diff --git a/superset-frontend/src/components/Datasource/ChangeDatasourceModal.tsx b/superset-frontend/src/components/Datasource/ChangeDatasourceModal.tsx
index 5e837f0a26846..b5b99e1089f4a 100644
--- a/superset-frontend/src/components/Datasource/ChangeDatasourceModal.tsx
+++ b/superset-frontend/src/components/Datasource/ChangeDatasourceModal.tsx
@@ -173,12 +173,10 @@ const ChangeDatasourceModal: FunctionComponent<ChangeDatasourceModalProps> = ({
 
   const handleChangeConfirm = () => {
     SupersetClient.get({
-      endpoint: `/api/v1/dataset/${confirmedDataset?.id}`,
+      endpoint: `/datasource/get/${confirmedDataset?.type}/${confirmedDataset?.id}/`,
     })
       .then(({ json }) => {
-        // eslint-disable-next-line no-param-reassign
-        json.result.type = 'table';
-        onDatasourceSave(json.result);
+        onDatasourceSave(json);
         onChange(`${confirmedDataset?.id}__table`);
       })
       .catch(response => {
diff --git a/superset-frontend/src/components/Datasource/DatasourceModal.test.jsx b/superset-frontend/src/components/Datasource/DatasourceModal.test.jsx
index 7d378902e6e93..12be350521515 100644
--- a/superset-frontend/src/components/Datasource/DatasourceModal.test.jsx
+++ b/superset-frontend/src/components/Datasource/DatasourceModal.test.jsx
@@ -40,8 +40,7 @@ const datasource = mockDatasource['7__table'];
 
 const SAVE_ENDPOINT = 'glob:*/api/v1/dataset/7';
 const SAVE_PAYLOAD = { new: 'data' };
-const SAVE_DATASOURCE_ENDPOINT = 'glob:*/api/v1/dataset/7';
-const GET_DATASOURCE_ENDPOINT = SAVE_DATASOURCE_ENDPOINT;
+const SAVE_DATASOURCE_ENDPOINT = 'glob:*/datasource/save/';
 
 const mockedProps = {
   datasource,
@@ -97,8 +96,7 @@ describe('DatasourceModal', () => {
 
   it('saves on confirm', async () => {
     const callsP = fetchMock.post(SAVE_ENDPOINT, SAVE_PAYLOAD);
-    fetchMock.put(SAVE_DATASOURCE_ENDPOINT, {});
-    fetchMock.get(GET_DATASOURCE_ENDPOINT, {});
+    fetchMock.post(SAVE_DATASOURCE_ENDPOINT, {});
     act(() => {
       wrapper
         .find('button[data-test="datasource-modal-save"]')
@@ -113,11 +111,7 @@ describe('DatasourceModal', () => {
       okButton.simulate('click');
     });
     await waitForComponentToPaint(wrapper);
-    // one call to PUT, then one to GET
-    const expected = [
-      'http://localhost/api/v1/dataset/7',
-      'http://localhost/api/v1/dataset/7',
-    ];
+    const expected = ['http://localhost/datasource/save/'];
     expect(callsP._calls.map(call => call[0])).toEqual(
       expected,
     ); /* eslint no-underscore-dangle: 0 */
diff --git a/superset-frontend/src/components/Datasource/DatasourceModal.tsx b/superset-frontend/src/components/Datasource/DatasourceModal.tsx
index b5b698f8315bb..90135f40f9ecd 100644
--- a/superset-frontend/src/components/Datasource/DatasourceModal.tsx
+++ b/superset-frontend/src/components/Datasource/DatasourceModal.tsx
@@ -96,81 +96,39 @@ const DatasourceModal: FunctionComponent<DatasourceModalProps> = ({
       currentDatasource.schema;
 
     setIsSaving(true);
-    SupersetClient.put({
-      endpoint: `/api/v1/dataset/${currentDatasource.id}`,
-      jsonPayload: {
-        table_name: currentDatasource.table_name,
-        database_id: currentDatasource.database?.id,
-        sql: currentDatasource.sql,
-        filter_select_enabled: currentDatasource.filter_select_enabled,
-        fetch_values_predicate: currentDatasource.fetch_values_predicate,
-        schema,
-        description: currentDatasource.description,
-        main_dttm_col: currentDatasource.main_dttm_col,
-        offset: currentDatasource.offset,
-        default_endpoint: currentDatasource.default_endpoint,
-        cache_timeout:
-          currentDatasource.cache_timeout === ''
-            ? null
-            : currentDatasource.cache_timeout,
-        is_sqllab_view: currentDatasource.is_sqllab_view,
-        template_params: currentDatasource.template_params,
-        extra: currentDatasource.extra,
-        is_managed_externally: currentDatasource.is_managed_externally,
-        external_url: currentDatasource.external_url,
-        metrics: currentDatasource?.metrics?.map(
-          (metric: Record<string, unknown>) => {
-            const metricBody: any = {
-              expression: metric.expression,
-              description: metric.description,
-              metric_name: metric.metric_name,
-              metric_type: metric.metric_type,
-              d3format: metric.d3format,
-              verbose_name: metric.verbose_name,
-              warning_text: metric.warning_text,
-              uuid: metric.uuid,
+    SupersetClient.post({
+      endpoint: '/datasource/save/',
+      postPayload: {
+        data: {
+          ...currentDatasource,
+          cache_timeout:
+            currentDatasource.cache_timeout === ''
+              ? null
+              : currentDatasource.cache_timeout,
+          schema,
+          metrics: currentDatasource?.metrics?.map(
+            (metric: Record<string, unknown>) => ({
+              ...metric,
               extra: buildExtraJsonObject(metric),
-            };
-            if (!Number.isNaN(Number(metric.id))) {
-              metricBody.id = metric.id;
-            }
-            return metricBody;
-          },
-        ),
-        columns: currentDatasource?.columns?.map(
-          (column: Record<string, unknown>) => ({
-            id: column.id,
-            column_name: column.column_name,
-            type: column.type,
-            advanced_data_type: column.advanced_data_type,
-            verbose_name: column.verbose_name,
-            description: column.description,
-            expression: column.expression,
-            filterable: column.filterable,
-            groupby: column.groupby,
-            is_active: column.is_active,
-            is_dttm: column.is_dttm,
-            python_date_format: column.python_date_format,
-            uuid: column.uuid,
-            extra: buildExtraJsonObject(column),
-          }),
-        ),
-        owners: currentDatasource.owners.map(
-          (o: Record<string, number>) => o.value || o.id,
-        ),
+            }),
+          ),
+          columns: currentDatasource?.columns?.map(
+            (column: Record<string, unknown>) => ({
+              ...column,
+              extra: buildExtraJsonObject(column),
+            }),
+          ),
+          type: currentDatasource.type || currentDatasource.datasource_type,
+          owners: currentDatasource.owners.map(
+            (o: Record<string, number>) => o.value || o.id,
+          ),
+        },
       },
     })
-      .then(() => {
-        addSuccessToast(t('The dataset has been saved'));
-        return SupersetClient.get({
-          endpoint: `/api/v1/dataset/${currentDatasource?.id}`,
-        });
-      })
       .then(({ json }) => {
-        // eslint-disable-next-line no-param-reassign
-        json.result.type = 'table';
+        addSuccessToast(t('The dataset has been saved'));
         onDatasourceSave({
-          ...json.result,
+          ...json,
           owners: currentDatasource.owners,
         });
         onHide();
diff --git a/superset-frontend/src/dashboard/constants.ts b/superset-frontend/src/dashboard/constants.ts
index aa9f4d8bd3619..97753abe45c54 100644
--- a/superset-frontend/src/dashboard/constants.ts
+++ b/superset-frontend/src/dashboard/constants.ts
@@ -28,7 +28,7 @@ export const PLACEHOLDER_DATASOURCE: Datasource = {
   columns: [],
   column_types: [],
   metrics: [],
-  column_formats: {},
+  column_format: {},
   verbose_map: {},
   main_dttm_col: '',
   description: '',
diff --git a/superset-frontend/src/explore/actions/datasourcesActions.test.ts b/superset-frontend/src/explore/actions/datasourcesActions.test.ts
index 996758b262323..6317f1f4a6d15 100644
--- a/superset-frontend/src/explore/actions/datasourcesActions.test.ts
+++ b/superset-frontend/src/explore/actions/datasourcesActions.test.ts
@@ -34,7 +34,7 @@ const CURRENT_DATASOURCE = {
   type: DatasourceType.Table,
   columns: [],
   metrics: [],
-  column_formats: {},
+  column_format: {},
   verbose_map: {},
   main_dttm_col: '__timestamp',
   // eg. ['["ds", true]', 'ds [asc]']
@@ -47,7 +47,7 @@ const NEW_DATASOURCE = {
   type: DatasourceType.Table,
   columns: [],
   metrics: [],
-  column_formats: {},
+  column_format: {},
   verbose_map: {},
   main_dttm_col: '__timestamp',
   // eg. ['["ds", true]', 'ds [asc]']
diff --git a/superset-frontend/src/explore/components/controls/DatasourceControl/DatasourceControl.test.tsx b/superset-frontend/src/explore/components/controls/DatasourceControl/DatasourceControl.test.tsx
index 80cb84ac8de7a..2c094e72afba0 100644
--- a/superset-frontend/src/explore/components/controls/DatasourceControl/DatasourceControl.test.tsx
+++ b/superset-frontend/src/explore/components/controls/DatasourceControl/DatasourceControl.test.tsx
@@ -69,20 +69,9 @@ const createProps = (overrides: JsonObject = {}) => ({
 });
 
 async function openAndSaveChanges(datasource: any) {
-  fetchMock.put(
-    'glob:*/api/v1/dataset/*',
-    {},
-    {
-      overwriteRoutes: true,
-    },
-  );
-  fetchMock.get(
-    'glob:*/api/v1/dataset/*',
-    { result: datasource },
-    {
-      overwriteRoutes: true,
-    },
-  );
+  fetchMock.post('glob:*/datasource/save/', datasource, {
+    overwriteRoutes: true,
+  });
   userEvent.click(screen.getByTestId('datasource-menu-trigger'));
   userEvent.click(await screen.findByTestId('edit-dataset'));
   userEvent.click(await screen.findByTestId('datasource-modal-save'));
@@ -165,7 +154,7 @@ test('Should show SQL Lab for sql_lab role', async () => {
 
 test('Click on Swap dataset option', async () => {
   const props = createProps();
-  SupersetClientGet.mockImplementationOnce(
+  SupersetClientGet.mockImplementation(
     async ({ endpoint }: { endpoint: string }) => {
       if (endpoint.includes('_info')) {
         return {
@@ -193,7 +182,7 @@ test('Click on Edit dataset', async () => {
   const props = createProps();
-  SupersetClientGet.mockImplementationOnce(
+  SupersetClientGet.mockImplementation(
     async () => ({ json: { result: [] } } as any),
   );
   render(<DatasourceControl {...props} />, {
@@ -217,7 +206,7 @@ test('Edit dataset should be disabled when user is not admin', async () => {
   // @ts-expect-error
   props.user.roles = {};
   props.datasource.owners = [];
-  SupersetClientGet.mockImplementationOnce(
+  SupersetClientGet.mockImplementation(
     async () => ({ json: { result: [] } } as any),
   );
diff --git a/superset-frontend/src/explore/controlUtils/controlUtils.test.tsx b/superset-frontend/src/explore/controlUtils/controlUtils.test.tsx
index e03aba06d11df..5feefc3481aa1 100644
--- a/superset-frontend/src/explore/controlUtils/controlUtils.test.tsx
+++ b/superset-frontend/src/explore/controlUtils/controlUtils.test.tsx
@@ -51,7 +51,7 @@ describe('controlUtils', () => {
     type: DatasourceType.Table,
     columns: [{ column_name: 'a' }],
     metrics: [{ metric_name: 'first' }, { metric_name: 'second' }],
-    column_formats: {},
+    column_format: {},
     verbose_map: {},
     main_dttm_col: '',
     datasource_name: '1__table',
diff --git a/superset-frontend/src/explore/controlUtils/getControlValuesCompatibleWithDatasource.test.ts b/superset-frontend/src/explore/controlUtils/getControlValuesCompatibleWithDatasource.test.ts
index c8d34f749d3dc..ad7ccc480121a 100644
--- a/superset-frontend/src/explore/controlUtils/getControlValuesCompatibleWithDatasource.test.ts
+++ b/superset-frontend/src/explore/controlUtils/getControlValuesCompatibleWithDatasource.test.ts
@@ -34,7 +34,7 @@ const sampleDatasource: Dataset = {
     { column_name: 'sample_column_4' },
   ],
   metrics: [{ metric_name: 'saved_metric_2' }],
-  column_formats: {},
+  column_format: {},
   verbose_map: {},
   main_dttm_col: '',
   datasource_name: 'Sample Dataset',
diff --git a/superset-frontend/src/explore/fixtures.tsx b/superset-frontend/src/explore/fixtures.tsx
index a0f3c112ec1b8..351e6f26b126b 100644
--- a/superset-frontend/src/explore/fixtures.tsx
+++ b/superset-frontend/src/explore/fixtures.tsx
@@ -135,7 +135,7 @@ export const exploreInitialData: ExplorePageInitialData = {
     type: DatasourceType.Table,
     columns: [{ column_name: 'a' }],
     metrics: [{ metric_name: 'first' }, { metric_name: 'second' }],
-    column_formats: {},
+    column_format: {},
     verbose_map: {},
     main_dttm_col: '',
     datasource_name: '8__table',
@@ -153,7 +153,7 @@ export const fallbackExploreInitialData: ExplorePageInitialData = {
     type: DatasourceType.Table,
     columns: [],
     metrics: [],
-    column_formats: {},
+    column_format: {},
     verbose_map: {},
     main_dttm_col: '',
     owners: [],
diff --git a/superset-frontend/src/utils/getDatasourceUid.test.ts b/superset-frontend/src/utils/getDatasourceUid.test.ts
index d3a629efcfed3..2d14fde26230c 100644
--- a/superset-frontend/src/utils/getDatasourceUid.test.ts
+++ b/superset-frontend/src/utils/getDatasourceUid.test.ts
@@ -25,7 +25,7 @@ const TEST_DATASOURCE = {
   type: DatasourceType.Table,
   columns: [],
   metrics: [],
-  column_formats: {},
+  column_format: {},
   verbose_map: {},
   main_dttm_col: '__timestamp',
   // eg. ['["ds", true]', 'ds [asc]']
diff --git a/superset/connectors/base/models.py b/superset/connectors/base/models.py
index 2cb0d54c51537..30cb5a4030255 100644
--- a/superset/connectors/base/models.py
+++ b/superset/connectors/base/models.py
@@ -19,18 +19,7 @@
 import json
 from datetime import datetime
 from enum import Enum
-from typing import (
-    Any,
-    Dict,
-    Hashable,
-    List,
-    Optional,
-    Set,
-    Tuple,
-    Type,
-    TYPE_CHECKING,
-    Union,
-)
+from typing import Any, Dict, Hashable, List, Optional, Set, Type, TYPE_CHECKING, Union
 
 from flask_appbuilder.security.sqla.models import User
 from flask_babel import gettext as __
@@ -253,33 +242,26 @@ def select_star(self) -> Optional[str]:
         pass
 
     @property
-    def order_by_choices(self) -> List[Tuple[str, str]]:
-        choices = []
+    def data(self) -> Dict[str, Any]:
+        """Data representation of the datasource sent to the frontend"""
+        order_by_choices = []
         # self.column_names return sorted column_names
         for column_name in self.column_names:
             column_name = str(column_name or "")
-            choices.append(
+            order_by_choices.append(
                 (json.dumps([column_name, True]), f"{column_name} " + __("[asc]"))
             )
-            choices.append(
+            order_by_choices.append(
                 (json.dumps([column_name, False]), f"{column_name} " + __("[desc]"))
             )
-        return choices
 
-    @property
-    def verbose_map(self) -> Dict[str, str]:
-        verb_map = {"__timestamp": "Time"}
-        verb_map.update(
+        verbose_map = {"__timestamp": "Time"}
+        verbose_map.update(
             {o.metric_name: o.verbose_name or o.metric_name for o in self.metrics}
         )
-        verb_map.update(
+        verbose_map.update(
             {o.column_name: o.verbose_name or o.column_name for o in self.columns}
         )
-        return verb_map
-
-    @property
-    def data(self) -> Dict[str, Any]:
-        """Data representation of the datasource sent to the frontend"""
         return {
             # simple fields
             "id": self.id,
@@ -306,9 +288,9 @@ def data(self) -> Dict[str, Any]:
             "columns": [o.data for o in self.columns],
             "metrics": [o.data for o in self.metrics],
             # TODO deprecate, move logic to JS
-            "order_by_choices": self.order_by_choices,
+            "order_by_choices": order_by_choices,
             "owners": [owner.id for owner in self.owners],
-            "verbose_map": self.verbose_map,
+            "verbose_map": verbose_map,
             "select_star": self.select_star,
         }
diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index 1a5dd0037e058..6180a546e7500 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -785,20 +785,14 @@ def health_check_message(self) -> Optional[str]:
         check = config["DATASET_HEALTH_CHECK"]
         return check(self) if check else None
 
-    @property
-    def granularity_sqla(self) -> List[Tuple[Any, Any]]:
-        return utils.choicify(self.dttm_cols)
-
-    @property
-    def time_grain_sqla(self) -> List[Tuple[Any, Any]]:
-        return [(g.duration, g.name) for g in self.database.grains() or []]
-
     @property
     def data(self) -> Dict[str, Any]:
         data_ = super().data
         if self.type == "table":
-            data_["granularity_sqla"] = self.granularity_sqla
-            data_["time_grain_sqla"] = self.time_grain_sqla
+            data_["granularity_sqla"] = utils.choicify(self.dttm_cols)
+            data_["time_grain_sqla"] = [
+                (g.duration, g.name) for g in self.database.grains() or []
+            ]
             data_["main_dttm_col"] = self.main_dttm_col
             data_["fetch_values_predicate"] = self.fetch_values_predicate
             data_["template_params"] = self.template_params
diff --git a/superset/datasets/api.py b/superset/datasets/api.py
index 12de82c780d43..48c429d32d4b1 100644
--- a/superset/datasets/api.py
+++ b/superset/datasets/api.py
@@ -195,14 +195,6 @@ class DatasetRestApi(BaseSupersetModelRestApi):
         "database.backend",
         "columns.advanced_data_type",
         "is_managed_externally",
-        "uid",
-        "datasource_name",
-        "name",
-        "column_formats",
-        "granularity_sqla",
-        "time_grain_sqla",
-        "order_by_choices",
-        "verbose_map",
     ]
     add_model_schema = DatasetPostSchema()
     edit_model_schema = DatasetPutSchema()
diff --git a/superset/datasets/commands/exceptions.py b/superset/datasets/commands/exceptions.py
index e06e92802f04c..91af2fdde4e9c 100644
--- a/superset/datasets/commands/exceptions.py
+++ b/superset/datasets/commands/exceptions.py
@@ -61,23 +61,6 @@ def __init__(self, table_name: str) -> None:
         )
 
 
-class DatasetEndpointUnsafeValidationError(ValidationError):
-    """
-    Marshmallow validation error for unsafe dataset default endpoint
-    """
-
-    def __init__(self) -> None:
-        super().__init__(
-            [
-                _(
-                    "The submitted URL is not considered safe,"
-                    " only use URLs with the same domain as Superset."
-                )
-            ],
-            field_name="default_endpoint",
-        )
-
-
 class DatasetColumnNotFoundValidationError(ValidationError):
     """
     Marshmallow validation error when dataset column for update does not exist
diff --git a/superset/datasets/commands/update.py b/superset/datasets/commands/update.py
index a2e483ba93ddb..483a98e76c3b8 100644
--- a/superset/datasets/commands/update.py
+++ b/superset/datasets/commands/update.py
@@ -18,7 +18,6 @@
 from collections import Counter
 from typing import Any, Dict, List, Optional
 
-from flask import current_app
 from flask_appbuilder.models.sqla import Model
 from marshmallow import ValidationError
 
@@ -31,7 +30,6 @@
     DatasetColumnNotFoundValidationError,
     DatasetColumnsDuplicateValidationError,
     DatasetColumnsExistsValidationError,
-    DatasetEndpointUnsafeValidationError,
    DatasetExistsValidationError,
     DatasetForbiddenError,
     DatasetInvalidError,
@@ -43,7 +41,6 @@
 )
 from superset.datasets.dao import DatasetDAO
 from superset.exceptions import SupersetSecurityException
-from superset.utils.urls import is_safe_url
 
 logger = logging.getLogger(__name__)
 
@@ -104,14 +101,6 @@ def validate(self) -> None:
             self._properties["owners"] = owners
         except ValidationError as ex:
             exceptions.append(ex)
-        # Validate default URL safety
-        default_endpoint = self._properties.get("default_endpoint")
-        if (
-            default_endpoint
-            and not is_safe_url(default_endpoint)
-            and current_app.config["PREVENT_UNSAFE_DEFAULT_URLS_ON_DATASET"]
-        ):
-            exceptions.append(DatasetEndpointUnsafeValidationError())
 
         # Validate columns
         columns = self._properties.get("columns")
diff --git a/superset/datasets/schemas.py b/superset/datasets/schemas.py
index 1d49bc1cfb432..103359a2c3f03 100644
--- a/superset/datasets/schemas.py
+++ b/superset/datasets/schemas.py
@@ -55,7 +55,7 @@ class DatasetColumnsPutSchema(Schema):
     extra = fields.String(allow_none=True)
     filterable = fields.Boolean()
    groupby = fields.Boolean()
-    is_active = fields.Boolean(allow_none=True)
+    is_active = fields.Boolean()
     is_dttm = fields.Boolean(default=False)
     python_date_format = fields.String(
         allow_none=True, validate=[Length(1, 255), validate_python_date_format]
diff --git a/superset/views/datasource/views.py b/superset/views/datasource/views.py
index 77cb47d19957a..4f158e836936a 100644
--- a/superset/views/datasource/views.py
+++ b/superset/views/datasource/views.py
@@ -44,7 +44,6 @@
 from superset.views.base import (
     api,
     BaseSupersetView,
-    deprecated,
     handle_api_exception,
     json_error_response,
 )
@@ -70,7 +69,6 @@ class Datasource(BaseSupersetView):
     @has_access_api
     @api
     @handle_api_exception
-    @deprecated(new_target="/api/v1/dataset/")
     def save(self) -> FlaskResponse:
         data = request.form.get("data")
         if not isinstance(data, str):
@@ -135,7 +133,6 @@ def save(self) -> FlaskResponse:
     @has_access_api
     @api
     @handle_api_exception
-    @deprecated(new_target="/api/v1/dataset/")
     def get(self, datasource_type: str, datasource_id: int) -> FlaskResponse:
         datasource = DatasourceDAO.get_datasource(
             db.session, DatasourceType(datasource_type), datasource_id
diff --git a/tests/integration_tests/datasets/api_tests.py b/tests/integration_tests/datasets/api_tests.py
index 60a8ee58bdf84..1762a46305b45 100644
--- a/tests/integration_tests/datasets/api_tests.py
+++ b/tests/integration_tests/datasets/api_tests.py
@@ -19,7 +19,7 @@
 import unittest
 from io import BytesIO
 from typing import List, Optional
-from unittest.mock import ANY, patch
+from unittest.mock import patch
 from zipfile import is_zipfile, ZipFile
 
 import prison
@@ -347,28 +347,6 @@ def test_get_dataset_item(self):
             "sql": None,
             "table_name": "energy_usage",
             "template_params": None,
-            "uid": "2__table",
-            "datasource_name": "energy_usage",
-            "name": f"{get_example_default_schema()}.energy_usage",
-            "column_formats": {},
-            "granularity_sqla": [],
-            "time_grain_sqla": ANY,
-            "order_by_choices": [
-                ['["source", true]', "source [asc]"],
-                ['["source", false]', "source [desc]"],
-                ['["target", true]', "target [asc]"],
-                ['["target", false]', "target [desc]"],
-                ['["value", true]', "value [asc]"],
-                ['["value", false]', "value [desc]"],
-            ],
-            "verbose_map": {
-                "__timestamp": "Time",
-                "count": "COUNT(*)",
-                "source": "source",
-                "sum__value": "sum__value",
-                "target": "target",
-                "value": "value",
-            },
         }
         if response["result"]["database"]["backend"] not in ("presto", "hive"):
             assert {
@@ -1372,32 +1350,6 @@ def test_update_dataset_item_uniqueness(self):
         db.session.delete(ab_user)
         db.session.commit()
 
-    def test_update_dataset_unsafe_default_endpoint(self):
-        """
-        Dataset API: Test unsafe default endpoint
-        """
-        if backend() == "sqlite":
-            return
-
-        dataset = self.insert_default_dataset()
-        self.login(username="admin")
-        uri = f"api/v1/dataset/{dataset.id}"
-        table_data = {"default_endpoint": "http://www.google.com"}
-        rv = self.client.put(uri, json=table_data)
-        data = json.loads(rv.data.decode("utf-8"))
-        assert rv.status_code == 422
-        expected_response = {
-            "message": {
-                "default_endpoint": [
-                    "The submitted URL is not considered safe,"
-                    " only use URLs with the same domain as Superset."
-                ]
-            }
-        }
-        assert data == expected_response
-        db.session.delete(dataset)
-        db.session.commit()
-
     @patch("superset.datasets.dao.DatasetDAO.update")
     def test_update_dataset_sqlalchemy_error(self, mock_dao_update):
         """