From 55c8f9ba600b1de4f8ed08db6a5ff7ab2d33924f Mon Sep 17 00:00:00 2001 From: Jesse Yang Date: Mon, 25 Jan 2021 15:09:03 -0800 Subject: [PATCH] feat(explore): allow opening charts with missing dataset (#12705) --- .../components/DatasourceControl_spec.jsx | 2 +- .../src/common/components/index.tsx | 1 + .../src/explore/components/Control.tsx | 4 +- .../explore/components/DatasourcePanel.tsx | 62 +++--------- .../components/controls/DatasourceControl.jsx | 49 +++++++++- superset/commands/exceptions.py | 2 +- superset/commands/utils.py | 4 +- superset/connectors/connector_registry.py | 17 +++- superset/connectors/druid/views.py | 4 +- superset/datasets/api.py | 1 + superset/datasets/commands/exceptions.py | 10 +- superset/datasets/commands/importers/v0.py | 15 ++- superset/models/slice.py | 2 +- superset/models/tags.py | 7 +- .../translations/de/LC_MESSAGES/messages.json | 4 +- .../translations/de/LC_MESSAGES/messages.po | 4 +- .../translations/en/LC_MESSAGES/messages.json | 4 +- .../translations/en/LC_MESSAGES/messages.po | 4 +- .../translations/es/LC_MESSAGES/messages.json | 4 +- .../translations/es/LC_MESSAGES/messages.po | 4 +- .../translations/fr/LC_MESSAGES/messages.json | 6 +- .../translations/fr/LC_MESSAGES/messages.po | 4 +- .../translations/it/LC_MESSAGES/messages.json | 4 +- .../translations/it/LC_MESSAGES/messages.po | 4 +- .../translations/ja/LC_MESSAGES/messages.json | 4 +- .../translations/ja/LC_MESSAGES/messages.po | 4 +- .../translations/ko/LC_MESSAGES/messages.json | 4 +- .../translations/ko/LC_MESSAGES/messages.po | 4 +- superset/translations/messages.pot | 4 +- .../translations/pt/LC_MESSAGES/message.json | 6 +- .../translations/pt/LC_MESSAGES/message.po | 4 +- .../translations/pt/LC_MESSAGES/messages.json | 4 +- .../pt_BR/LC_MESSAGES/messages.json | 6 +- .../pt_BR/LC_MESSAGES/messages.po | 4 +- .../translations/ru/LC_MESSAGES/messages.json | 4 +- .../translations/ru/LC_MESSAGES/messages.po | 4 +- .../translations/zh/LC_MESSAGES/messages.json | 4 +- .../translations/zh/LC_MESSAGES/messages.po | 6 +- superset/utils/core.py | 3 + superset/views/base.py | 7 +- superset/views/core.py | 97 ++++++++++--------- superset/views/datasource.py | 37 +++---- superset/views/utils.py | 8 +- tests/base_tests.py | 15 +++ tests/charts/api_tests.py | 21 ++-- tests/datasets/api_tests.py | 7 +- tests/datasource_tests.py | 48 +++++---- 47 files changed, 278 insertions(+), 249 deletions(-) diff --git a/superset-frontend/spec/javascripts/explore/components/DatasourceControl_spec.jsx b/superset-frontend/spec/javascripts/explore/components/DatasourceControl_spec.jsx index 2996dcd1111a5..5891327c3b858 100644 --- a/superset-frontend/spec/javascripts/explore/components/DatasourceControl_spec.jsx +++ b/superset-frontend/spec/javascripts/explore/components/DatasourceControl_spec.jsx @@ -99,7 +99,7 @@ describe('DatasourceControl', () => { const wrapper = setup(); const alert = wrapper.find(Icon); expect(alert.at(1).prop('name')).toBe('alert-solid'); - const tooltip = wrapper.find(Tooltip).at(1); + const tooltip = wrapper.find(Tooltip).at(0); expect(tooltip.prop('title')).toBe( defaultProps.datasource.health_check_message, ); diff --git a/superset-frontend/src/common/components/index.tsx b/superset-frontend/src/common/components/index.tsx index 5c66af5d44f0c..f56b09e4fd569 100644 --- a/superset-frontend/src/common/components/index.tsx +++ b/superset-frontend/src/common/components/index.tsx @@ -28,6 +28,7 @@ import { DropDownProps } from 'antd/lib/dropdown'; */ // eslint-disable-next-line 
no-restricted-imports export { + Alert, AutoComplete, Avatar, Button, diff --git a/superset-frontend/src/explore/components/Control.tsx b/superset-frontend/src/explore/components/Control.tsx index e728ad59d9045..f1b6925bd6935 100644 --- a/superset-frontend/src/explore/components/Control.tsx +++ b/superset-frontend/src/explore/components/Control.tsx @@ -29,9 +29,9 @@ export type ControlProps = { // signature to the original action factory. actions: Partial & Pick; type: ControlType; - label: string; + label?: ReactNode; name: string; - description?: string; + description?: ReactNode; tooltipOnClick?: () => ReactNode; places?: number; rightNode?: ReactNode; diff --git a/superset-frontend/src/explore/components/DatasourcePanel.tsx b/superset-frontend/src/explore/components/DatasourcePanel.tsx index c35bd0231ce34..7beebd2d7cb9a 100644 --- a/superset-frontend/src/explore/components/DatasourcePanel.tsx +++ b/superset-frontend/src/explore/components/DatasourcePanel.tsx @@ -17,57 +17,25 @@ * under the License. */ import React, { useEffect, useState } from 'react'; -import { styled, t, QueryFormData } from '@superset-ui/core'; +import { styled, t } from '@superset-ui/core'; import { Collapse } from 'src/common/components'; import { ColumnOption, MetricOption, - ControlType, + ControlConfig, + DatasourceMeta, } from '@superset-ui/chart-controls'; import { debounce } from 'lodash'; import { matchSorter, rankings } from 'match-sorter'; import { ExploreActions } from '../actions/exploreActions'; import Control from './Control'; -interface DatasourceControl { - validationErrors: Array; - mapStateToProps: QueryFormData; - type: ControlType; - label: string; - datasource?: DatasourceControl; +interface DatasourceControl extends ControlConfig { + datasource?: DatasourceMeta; } -type Columns = { - column_name: string; - description: string | undefined; - expression: string | undefined; - filterable: boolean; - groupby: string | undefined; - id: number; - is_dttm: boolean; - python_date_format: string; - type: string; - verbose_name: string; -}; - -type Metrics = { - certification_details: string | undefined; - certified_by: string | undefined; - d3format: string | undefined; - description: string | undefined; - expression: string; - id: number; - is_certified: boolean; - metric_name: string; - verbose_name: string; - warning_text: string; -}; - interface Props { - datasource: { - columns: Array; - metrics: Array; - }; + datasource: DatasourceMeta; controls: { datasource: DatasourceControl; }; @@ -193,15 +161,8 @@ export default function DataSourcePanel({ const metricSlice = lists.metrics.slice(0, 50); const columnSlice = lists.columns.slice(0, 50); - return ( - - + const mainBody = ( + <> { @@ -245,6 +206,13 @@ export default function DataSourcePanel({ + + ); + + return ( + + + {datasource.id != null && mainBody} ); } diff --git a/superset-frontend/src/explore/components/controls/DatasourceControl.jsx b/superset-frontend/src/explore/components/controls/DatasourceControl.jsx index d531d55c45b35..88e804da185b3 100644 --- a/superset-frontend/src/explore/components/controls/DatasourceControl.jsx +++ b/superset-frontend/src/explore/components/controls/DatasourceControl.jsx @@ -26,6 +26,8 @@ import Icon from 'src/components/Icon'; import ChangeDatasourceModal from 'src/datasource/ChangeDatasourceModal'; import DatasourceModal from 'src/datasource/DatasourceModal'; import { postForm } from 'src/explore/exploreUtils'; +import Button from 'src/components/Button'; +import ErrorAlert from 
'src/components/ErrorMessage/ErrorAlert'; const propTypes = { actions: PropTypes.object.isRequired, @@ -51,6 +53,9 @@ const Styles = styled.div` border-bottom: 1px solid ${({ theme }) => theme.colors.grayscale.light2}; padding: ${({ theme }) => 2 * theme.gridUnit}px; } + .error-alert { + margin: ${({ theme }) => 2 * theme.gridUnit}px; + } .ant-dropdown-trigger { margin-left: ${({ theme }) => 2 * theme.gridUnit}px; box-shadow: none; @@ -152,6 +157,7 @@ class DatasourceControl extends React.PureComponent { render() { const { showChangeDatasourceModal, showEditDatasourceModal } = this.state; const { datasource, onChange } = this.props; + const isMissingDatasource = !datasource?.id; const datasourceMenu = ( {this.props.isEditable && ( @@ -164,16 +170,22 @@ class DatasourceControl extends React.PureComponent { ); - // eslint-disable-next-line camelcase const { health_check_message: healthCheckMessage } = datasource; return (
-        <Tooltip title={datasource.name}>
-          {datasource.name}
-        </Tooltip>
+        {/* Add a tooltip only for long dataset names */}
+        {!isMissingDatasource && datasource.name.length > 25 ? (
+          <Tooltip title={datasource.name}>
+            {datasource.name}
+          </Tooltip>
+        ) : (
+          <span>
+            {datasource.name}
+          </span>
+        )}
         {healthCheckMessage && (
           <Tooltip title={healthCheckMessage}>
             <Icon name="alert-solid" />
           </Tooltip>
         )}
+        {/* missing dataset */}
+        {isMissingDatasource && (
+          <div className="error-alert">
+            <ErrorAlert
+              level="warning"
+              title={t('Missing dataset')}
+              source="explore"
+              subtitle={
+                <>
+                  <p>
+                    {t(
+                      'The dataset linked to this chart may have been deleted.',
+                    )}
+                  </p>
+                  <p>
+                    <Button
+                      buttonStyle="primary"
+                      onClick={this.toggleChangeDatasourceModal}
+                    >
+                      {t('Change dataset')}
+                    </Button>
+                  </p>
+                </>
+              }
+            />
+          </div>
+ )} {showEditDatasourceModal && ( None: - super().__init__([_("Datasource does not exist")], field_name="datasource_id") + super().__init__([_("Dataset does not exist")], field_name="datasource_id") diff --git a/superset/commands/utils.py b/superset/commands/utils.py index c0bd8b707055d..874ea4bc5fb93 100644 --- a/superset/commands/utils.py +++ b/superset/commands/utils.py @@ -17,7 +17,6 @@ from typing import List, Optional from flask_appbuilder.security.sqla.models import User -from sqlalchemy.orm.exc import NoResultFound from superset.commands.exceptions import ( DatasourceNotFoundValidationError, @@ -25,6 +24,7 @@ ) from superset.connectors.base.models import BaseDatasource from superset.connectors.connector_registry import ConnectorRegistry +from superset.datasets.commands.exceptions import DatasetNotFoundError from superset.extensions import db, security_manager @@ -53,5 +53,5 @@ def get_datasource_by_id(datasource_id: int, datasource_type: str) -> BaseDataso return ConnectorRegistry.get_datasource( datasource_type, datasource_id, db.session ) - except (NoResultFound, KeyError): + except DatasetNotFoundError: raise DatasourceNotFoundValidationError() diff --git a/superset/connectors/connector_registry.py b/superset/connectors/connector_registry.py index fa9a54bda76f3..0931fd9e55679 100644 --- a/superset/connectors/connector_registry.py +++ b/superset/connectors/connector_registry.py @@ -19,6 +19,8 @@ from sqlalchemy import or_ from sqlalchemy.orm import Session, subqueryload +from superset.datasets.commands.exceptions import DatasetNotFoundError + if TYPE_CHECKING: from collections import OrderedDict @@ -44,12 +46,23 @@ def register_sources(cls, datasource_config: "OrderedDict[str, List[str]]") -> N def get_datasource( cls, datasource_type: str, datasource_id: int, session: Session ) -> "BaseDatasource": - return ( + """Safely get a datasource instance, raises `DatasetNotFoundError` if + `datasource_type` is not registered or `datasource_id` does not + exist.""" + if datasource_type not in cls.sources: + raise DatasetNotFoundError() + + datasource = ( session.query(cls.sources[datasource_type]) .filter_by(id=datasource_id) - .one() + .one_or_none() ) + if not datasource: + raise DatasetNotFoundError() + + return datasource + @classmethod def get_all_datasources(cls, session: Session) -> List["BaseDatasource"]: datasources: List["BaseDatasource"] = [] diff --git a/superset/connectors/druid/views.py b/superset/connectors/druid/views.py index 112bcd176d710..4b2b45bb66c07 100644 --- a/superset/connectors/druid/views.py +++ b/superset/connectors/druid/views.py @@ -39,7 +39,7 @@ BaseSupersetView, DatasourceFilter, DeleteMixin, - get_datasource_exist_error_msg, + get_dataset_exist_error_msg, ListWidgetWithCheckboxes, SupersetModelView, validate_json, @@ -352,7 +352,7 @@ def pre_add(self, item: "DruidDatasourceModelView") -> None: models.DruidDatasource.cluster_id == item.cluster_id, ) if db.session.query(query.exists()).scalar(): - raise Exception(get_datasource_exist_error_msg(item.full_name)) + raise Exception(get_dataset_exist_error_msg(item.full_name)) def post_add(self, item: "DruidDatasourceModelView") -> None: item.refresh_metrics() diff --git a/superset/datasets/api.py b/superset/datasets/api.py index af235ced15bea..96f3532265da0 100644 --- a/superset/datasets/api.py +++ b/superset/datasets/api.py @@ -231,6 +231,7 @@ def post(self) -> Response: # This validates custom Schema with custom validations except ValidationError as error: return self.response_400(message=error.messages) + 
try: new_model = CreateDatasetCommand(g.user, item).run() return self.response(201, id=new_model.id, result=item) diff --git a/superset/datasets/commands/exceptions.py b/superset/datasets/commands/exceptions.py index 03071cac63948..44064f07cc01a 100644 --- a/superset/datasets/commands/exceptions.py +++ b/superset/datasets/commands/exceptions.py @@ -26,7 +26,10 @@ ImportFailedError, UpdateFailedError, ) -from superset.views.base import get_datasource_exist_error_msg + + +def get_dataset_exist_error_msg(full_name: str) -> str: + return _("Dataset %(name)s already exists", name=full_name) class DatabaseNotFoundValidationError(ValidationError): @@ -54,7 +57,7 @@ class DatasetExistsValidationError(ValidationError): def __init__(self, table_name: str) -> None: super().__init__( - get_datasource_exist_error_msg(table_name), field_name="table_name" + [get_dataset_exist_error_msg(table_name)], field_name="table_name" ) @@ -142,7 +145,8 @@ def __init__(self) -> None: class DatasetNotFoundError(CommandException): - message = "Dataset not found." + status = 404 + message = _("Dataset does not exist") class DatasetInvalidError(CommandInvalidError): diff --git a/superset/datasets/commands/importers/v0.py b/superset/datasets/commands/importers/v0.py index df02a393e17ec..a19e9ae05bf5e 100644 --- a/superset/datasets/commands/importers/v0.py +++ b/superset/datasets/commands/importers/v0.py @@ -21,7 +21,6 @@ import yaml from flask_appbuilder import Model from sqlalchemy.orm import Session -from sqlalchemy.orm.exc import NoResultFound from sqlalchemy.orm.session import make_transient from superset import db @@ -56,14 +55,14 @@ def lookup_sqla_table(table: SqlaTable) -> Optional[SqlaTable]: def lookup_sqla_database(table: SqlaTable) -> Optional[Database]: - try: - return ( - db.session.query(Database) - .filter_by(database_name=table.params_dict["database_name"]) - .one() - ) - except NoResultFound: + database = ( + db.session.query(Database) + .filter_by(database_name=table.params_dict["database_name"]) + .one_or_none() + ) + if database is None: raise DatabaseNotFoundError + return database def lookup_druid_cluster(datasource: DruidDatasource) -> Optional[DruidCluster]: diff --git a/superset/models/slice.py b/superset/models/slice.py index 2fd55a7aceef6..7461fa8ee5a3b 100644 --- a/superset/models/slice.py +++ b/superset/models/slice.py @@ -206,7 +206,7 @@ def digest(self) -> str: """ Returns a MD5 HEX digest that makes this dashboard unique """ - return utils.md5_hex(self.params) + return utils.md5_hex(self.params or "") @property def thumbnail_url(self) -> str: diff --git a/superset/models/tags.py b/superset/models/tags.py index 3f508ff6c6fb0..722c5b099e1e0 100644 --- a/superset/models/tags.py +++ b/superset/models/tags.py @@ -23,7 +23,6 @@ from sqlalchemy import Column, Enum, ForeignKey, Integer, String from sqlalchemy.engine.base import Connection from sqlalchemy.orm import relationship, Session, sessionmaker -from sqlalchemy.orm.exc import NoResultFound from sqlalchemy.orm.mapper import Mapper from superset.models.helpers import AuditMixinNullable @@ -89,13 +88,11 @@ class TaggedObject(Model, AuditMixinNullable): def get_tag(name: str, session: Session, type_: TagTypes) -> Tag: - try: - tag = session.query(Tag).filter_by(name=name, type=type_).one() - except NoResultFound: + tag = session.query(Tag).filter_by(name=name, type=type_).one_or_none() + if tag is None: tag = Tag(name=name, type=type_) session.add(tag) session.commit() - return tag diff --git a/superset/translations/de/LC_MESSAGES/messages.json 
b/superset/translations/de/LC_MESSAGES/messages.json index 61175cb759684..72de85be7761b 100644 --- a/superset/translations/de/LC_MESSAGES/messages.json +++ b/superset/translations/de/LC_MESSAGES/messages.json @@ -225,7 +225,7 @@ "Charts could not be deleted.": [""], "Import chart failed for an unknown reason": [""], "Owners are invalid": [""], - "Datasource does not exist": ["Datenquellen"], + "Dataset does not exist": ["Datenquellen"], "`operation` property of post processing object undefined": [""], "Unsupported post processing operation: %(operation)s": [""], "Adding new datasource [{}]": ["Druid Datenquelle einfügen"], @@ -643,7 +643,7 @@ "Add Annotation Layer": ["Anmerkungstufe"], "Edit Annotation Layer": ["Anmerkungstufe"], "Name": ["Name"], - "Datasource %(name)s already exists": [""], + "Dataset %(name)s already exists": [""], "Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [ "" ], diff --git a/superset/translations/de/LC_MESSAGES/messages.po b/superset/translations/de/LC_MESSAGES/messages.po index 8aecafa0ee60e..8cd006f2cd944 100644 --- a/superset/translations/de/LC_MESSAGES/messages.po +++ b/superset/translations/de/LC_MESSAGES/messages.po @@ -776,7 +776,7 @@ msgid "Owners are invalid" msgstr "" #: superset/commands/exceptions.py:92 -msgid "Datasource does not exist" +msgid "Dataset does not exist" msgstr "Datenquellen" #: superset/common/query_object.py:301 @@ -2303,7 +2303,7 @@ msgstr "Name" #: superset/views/base.py:207 #, python-format -msgid "Datasource %(name)s already exists" +msgid "Dataset %(name)s already exists" msgstr "" #: superset/views/base.py:227 diff --git a/superset/translations/en/LC_MESSAGES/messages.json b/superset/translations/en/LC_MESSAGES/messages.json index e9e28a898c6ae..28521e4d97f71 100644 --- a/superset/translations/en/LC_MESSAGES/messages.json +++ b/superset/translations/en/LC_MESSAGES/messages.json @@ -200,7 +200,7 @@ "Charts could not be deleted.": [""], "Import chart failed for an unknown reason": [""], "Owners are invalid": [""], - "Datasource does not exist": [""], + "Dataset does not exist": [""], "`operation` property of post processing object undefined": [""], "Unsupported post processing operation: %(operation)s": [""], "Adding new datasource [{}]": [""], @@ -585,7 +585,7 @@ "Add Annotation Layer": [""], "Edit Annotation Layer": [""], "Name": [""], - "Datasource %(name)s already exists": [""], + "Dataset %(name)s already exists": [""], "Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [ "" ], diff --git a/superset/translations/en/LC_MESSAGES/messages.po b/superset/translations/en/LC_MESSAGES/messages.po index 7158ff013852c..86c784cdb7028 100644 --- a/superset/translations/en/LC_MESSAGES/messages.po +++ b/superset/translations/en/LC_MESSAGES/messages.po @@ -775,7 +775,7 @@ msgid "Owners are invalid" msgstr "" #: superset/commands/exceptions.py:92 -msgid "Datasource does not exist" +msgid "Dataset does not exist" msgstr "" #: superset/common/query_object.py:301 @@ -2302,7 +2302,7 @@ msgstr "" #: superset/views/base.py:207 #, python-format -msgid "Datasource %(name)s already exists" +msgid "Dataset %(name)s already exists" msgstr "" #: superset/views/base.py:227 diff --git a/superset/translations/es/LC_MESSAGES/messages.json b/superset/translations/es/LC_MESSAGES/messages.json index b8e088da76ed1..7ad0696a40468 100644 --- a/superset/translations/es/LC_MESSAGES/messages.json +++ 
b/superset/translations/es/LC_MESSAGES/messages.json @@ -272,7 +272,7 @@ "Charts could not be deleted.": ["Los Gráficos no han podido eliminarse"], "Import chart failed for an unknown reason": [""], "Owners are invalid": ["Los propietarios son invalidos"], - "Datasource does not exist": ["La fuente no existe"], + "Dataset does not exist": ["La fuente no existe"], "`operation` property of post processing object undefined": [""], "Unsupported post processing operation: %(operation)s": [""], "Adding new datasource [{}]": ["Añadiendo [{}] como nueva fuente"], @@ -696,7 +696,7 @@ "Add Annotation Layer": [""], "Edit Annotation Layer": [""], "Name": ["Nombre"], - "Datasource %(name)s already exists": [ + "Dataset %(name)s already exists": [ "La fuente de datos %(name)s ya existe" ], "Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [ diff --git a/superset/translations/es/LC_MESSAGES/messages.po b/superset/translations/es/LC_MESSAGES/messages.po index 19afda9256112..c916cc55ddcdf 100644 --- a/superset/translations/es/LC_MESSAGES/messages.po +++ b/superset/translations/es/LC_MESSAGES/messages.po @@ -784,7 +784,7 @@ msgid "Owners are invalid" msgstr "Los propietarios son invalidos" #: superset/commands/exceptions.py:92 -msgid "Datasource does not exist" +msgid "Dataset does not exist" msgstr "La fuente no existe" #: superset/common/query_object.py:301 @@ -2336,7 +2336,7 @@ msgstr "Nombre" #: superset/views/base.py:207 #, python-format -msgid "Datasource %(name)s already exists" +msgid "Dataset %(name)s already exists" msgstr "La fuente de datos %(name)s ya existe" #: superset/views/base.py:227 diff --git a/superset/translations/fr/LC_MESSAGES/messages.json b/superset/translations/fr/LC_MESSAGES/messages.json index 5629a9d4bc225..151f527b764ce 100644 --- a/superset/translations/fr/LC_MESSAGES/messages.json +++ b/superset/translations/fr/LC_MESSAGES/messages.json @@ -277,9 +277,7 @@ "Charts could not be deleted.": ["La requête ne peut pas être chargée"], "Import chart failed for an unknown reason": [""], "Owners are invalid": [""], - "Datasource does not exist": [ - "La source de données %(name)s existe déjà" - ], + "Dataset does not exist": ["La source de données %(name)s existe déjà"], "`operation` property of post processing object undefined": [""], "Unsupported post processing operation: %(operation)s": [""], "Adding new datasource [{}]": ["Ajouter une source de données Druid"], @@ -728,7 +726,7 @@ "Add Annotation Layer": ["Ajouter une couche d'annotation"], "Edit Annotation Layer": ["Ajouter une couche d'annotation"], "Name": ["Nom"], - "Datasource %(name)s already exists": [ + "Dataset %(name)s already exists": [ "La source de données %(name)s existe déjà" ], "Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [ diff --git a/superset/translations/fr/LC_MESSAGES/messages.po b/superset/translations/fr/LC_MESSAGES/messages.po index 40b28274825f3..660cceb3c13c3 100644 --- a/superset/translations/fr/LC_MESSAGES/messages.po +++ b/superset/translations/fr/LC_MESSAGES/messages.po @@ -781,7 +781,7 @@ msgid "Owners are invalid" msgstr "" #: superset/commands/exceptions.py:92 -msgid "Datasource does not exist" +msgid "Dataset does not exist" msgstr "La source de données %(name)s existe déjà" #: superset/common/query_object.py:301 @@ -2350,7 +2350,7 @@ msgstr "Nom" #: superset/views/base.py:207 #, python-format -msgid "Datasource %(name)s already exists" +msgid "Dataset 
%(name)s already exists" msgstr "La source de données %(name)s existe déjà" #: superset/views/base.py:227 diff --git a/superset/translations/it/LC_MESSAGES/messages.json b/superset/translations/it/LC_MESSAGES/messages.json index d479c62a3c8d9..da4561b43f0a6 100644 --- a/superset/translations/it/LC_MESSAGES/messages.json +++ b/superset/translations/it/LC_MESSAGES/messages.json @@ -237,7 +237,7 @@ "Charts could not be deleted.": ["La query non può essere caricata"], "Import chart failed for an unknown reason": [""], "Owners are invalid": [""], - "Datasource does not exist": ["Sorgente dati e tipo di grafico"], + "Dataset does not exist": ["Sorgente dati e tipo di grafico"], "`operation` property of post processing object undefined": [""], "Unsupported post processing operation: %(operation)s": [""], "Adding new datasource [{}]": [""], @@ -643,7 +643,7 @@ "Add Annotation Layer": [""], "Edit Annotation Layer": [""], "Name": ["Nome"], - "Datasource %(name)s already exists": [""], + "Dataset %(name)s already exists": [""], "Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [ "" ], diff --git a/superset/translations/it/LC_MESSAGES/messages.po b/superset/translations/it/LC_MESSAGES/messages.po index 3b80e840885c1..1157574f3c50e 100644 --- a/superset/translations/it/LC_MESSAGES/messages.po +++ b/superset/translations/it/LC_MESSAGES/messages.po @@ -773,7 +773,7 @@ msgid "Owners are invalid" msgstr "" #: superset/commands/exceptions.py:92 -msgid "Datasource does not exist" +msgid "Dataset does not exist" msgstr "Sorgente dati e tipo di grafico" #: superset/common/query_object.py:301 @@ -2331,7 +2331,7 @@ msgstr "Nome" #: superset/views/base.py:207 #, python-format -msgid "Datasource %(name)s already exists" +msgid "Dataset %(name)s already exists" msgstr "" #: superset/views/base.py:227 diff --git a/superset/translations/ja/LC_MESSAGES/messages.json b/superset/translations/ja/LC_MESSAGES/messages.json index 2cd8c1abb245a..7f7284faa0a75 100644 --- a/superset/translations/ja/LC_MESSAGES/messages.json +++ b/superset/translations/ja/LC_MESSAGES/messages.json @@ -213,7 +213,7 @@ "Charts could not be deleted.": [""], "Import chart failed for an unknown reason": [""], "Owners are invalid": [""], - "Datasource does not exist": ["データソース"], + "Dataset does not exist": ["データソース"], "`operation` property of post processing object undefined": [""], "Unsupported post processing operation: %(operation)s": [""], "Adding new datasource [{}]": [""], @@ -601,7 +601,7 @@ "Add Annotation Layer": [""], "Edit Annotation Layer": [""], "Name": ["名前"], - "Datasource %(name)s already exists": [""], + "Dataset %(name)s already exists": [""], "Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [ "" ], diff --git a/superset/translations/ja/LC_MESSAGES/messages.po b/superset/translations/ja/LC_MESSAGES/messages.po index 697631c824693..089826c792da6 100644 --- a/superset/translations/ja/LC_MESSAGES/messages.po +++ b/superset/translations/ja/LC_MESSAGES/messages.po @@ -772,7 +772,7 @@ msgid "Owners are invalid" msgstr "" #: superset/commands/exceptions.py:92 -msgid "Datasource does not exist" +msgid "Dataset does not exist" msgstr "データソース" #: superset/common/query_object.py:301 @@ -2294,7 +2294,7 @@ msgstr "名前" #: superset/views/base.py:207 #, python-format -msgid "Datasource %(name)s already exists" +msgid "Dataset %(name)s already exists" msgstr "" #: superset/views/base.py:227 diff --git 
a/superset/translations/ko/LC_MESSAGES/messages.json b/superset/translations/ko/LC_MESSAGES/messages.json index fea747fde8ac8..f756a7472cedb 100644 --- a/superset/translations/ko/LC_MESSAGES/messages.json +++ b/superset/translations/ko/LC_MESSAGES/messages.json @@ -197,7 +197,7 @@ "Charts could not be deleted.": [""], "Import chart failed for an unknown reason": [""], "Owners are invalid": [""], - "Datasource does not exist": ["데이터소스"], + "Dataset does not exist": ["데이터소스"], "`operation` property of post processing object undefined": [""], "Unsupported post processing operation: %(operation)s": [""], "Adding new datasource [{}]": ["새 데이터소스 스캔"], @@ -579,7 +579,7 @@ "Add Annotation Layer": [""], "Edit Annotation Layer": ["주석 레이어"], "Name": ["이름"], - "Datasource %(name)s already exists": [""], + "Dataset %(name)s already exists": [""], "Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [ "" ], diff --git a/superset/translations/ko/LC_MESSAGES/messages.po b/superset/translations/ko/LC_MESSAGES/messages.po index 74e9512914185..7168410c21a29 100644 --- a/superset/translations/ko/LC_MESSAGES/messages.po +++ b/superset/translations/ko/LC_MESSAGES/messages.po @@ -772,7 +772,7 @@ msgid "Owners are invalid" msgstr "" #: superset/commands/exceptions.py:92 -msgid "Datasource does not exist" +msgid "Dataset does not exist" msgstr "데이터소스" #: superset/common/query_object.py:301 @@ -2294,7 +2294,7 @@ msgstr "이름" #: superset/views/base.py:207 #, python-format -msgid "Datasource %(name)s already exists" +msgid "Dataset %(name)s already exists" msgstr "" #: superset/views/base.py:227 diff --git a/superset/translations/messages.pot b/superset/translations/messages.pot index 283ac68deb0f4..4fd8fd09e188a 100644 --- a/superset/translations/messages.pot +++ b/superset/translations/messages.pot @@ -775,7 +775,7 @@ msgid "Owners are invalid" msgstr "" #: superset/commands/exceptions.py:92 -msgid "Datasource does not exist" +msgid "Dataset does not exist" msgstr "" #: superset/common/query_object.py:301 @@ -2306,7 +2306,7 @@ msgstr "" #: superset/views/base.py:207 #, python-format -msgid "Datasource %(name)s already exists" +msgid "Dataset %(name)s already exists" msgstr "" #: superset/views/base.py:227 diff --git a/superset/translations/pt/LC_MESSAGES/message.json b/superset/translations/pt/LC_MESSAGES/message.json index d71426ae879bc..dd45b3e9582da 100644 --- a/superset/translations/pt/LC_MESSAGES/message.json +++ b/superset/translations/pt/LC_MESSAGES/message.json @@ -258,7 +258,7 @@ "Charts could not be deleted.": ["Não foi possível carregar a query"], "Import chart failed for an unknown reason": [""], "Owners are invalid": [""], - "Datasource does not exist": ["Origem de dados %(name)s já existe"], + "Dataset does not exist": ["Origem de dados %(name)s já existe"], "`operation` property of post processing object undefined": [""], "Unsupported post processing operation: %(operation)s": [""], "Adding new datasource [{}]": ["Adicionar origem de dados Druid"], @@ -693,9 +693,7 @@ "Add Annotation Layer": ["Camadas de anotação"], "Edit Annotation Layer": ["Camadas de anotação"], "Name": ["Nome"], - "Datasource %(name)s already exists": [ - "Origem de dados %(name)s já existe" - ], + "Dataset %(name)s already exists": ["Origem de dados %(name)s já existe"], "Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [ "Tabela [{}] não encontrada, por favor verifique conexão à base de 
dados, esquema e nome da tabela" ], diff --git a/superset/translations/pt/LC_MESSAGES/message.po b/superset/translations/pt/LC_MESSAGES/message.po index a1187a1ad27f0..6ceaeaf93daf1 100644 --- a/superset/translations/pt/LC_MESSAGES/message.po +++ b/superset/translations/pt/LC_MESSAGES/message.po @@ -783,7 +783,7 @@ msgid "Owners are invalid" msgstr "" #: superset/commands/exceptions.py:92 -msgid "Datasource does not exist" +msgid "Dataset does not exist" msgstr "Origem de dados %(name)s já existe" #: superset/common/query_object.py:297 @@ -2363,7 +2363,7 @@ msgstr "Nome" #: superset/views/base.py:207 #, python-format -msgid "Datasource %(name)s already exists" +msgid "Dataset %(name)s already exists" msgstr "Origem de dados %(name)s já existe" #: superset/views/base.py:227 diff --git a/superset/translations/pt/LC_MESSAGES/messages.json b/superset/translations/pt/LC_MESSAGES/messages.json index 9243828de3017..9bb3d757611db 100644 --- a/superset/translations/pt/LC_MESSAGES/messages.json +++ b/superset/translations/pt/LC_MESSAGES/messages.json @@ -1150,9 +1150,7 @@ "Welcome!": ["Bem vindo!"], "Test Connection": ["Conexão de teste"], "Manage": ["Gerir"], - "Datasource %(name)s already exists": [ - "Origem de dados %(name)s já existe" - ], + "Dataset %(name)s already exists": ["Origem de dados %(name)s já existe"], "json isn't valid": ["json não é válido"], "Delete": ["Eliminar"], "Delete all Really?": ["Tem a certeza que pretende eliminar tudo?"], diff --git a/superset/translations/pt_BR/LC_MESSAGES/messages.json b/superset/translations/pt_BR/LC_MESSAGES/messages.json index 2974661afbb70..669f13bae685f 100644 --- a/superset/translations/pt_BR/LC_MESSAGES/messages.json +++ b/superset/translations/pt_BR/LC_MESSAGES/messages.json @@ -328,7 +328,7 @@ "A importação do gráfico falhou por um motivo desconhecido" ], "Owners are invalid": ["Donos inválidos"], - "Datasource does not exist": ["Fonte de dados não existe"], + "Dataset does not exist": ["Fonte de dados não existe"], "`operation` property of post processing object undefined": [ "A propriedade `operation` do objeto de pós processamento está indefinida" ], @@ -935,9 +935,7 @@ "Add Annotation Layer": ["Adicionar camada de anotação"], "Edit Annotation Layer": ["Editar camada de anotação"], "Name": ["Nome"], - "Datasource %(name)s already exists": [ - "Fonte de dados %(name)s já existe" - ], + "Dataset %(name)s already exists": ["Fonte de dados %(name)s já existe"], "Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [ "Não foi possível localizar a tabela [%{table}s], por favor revise sua conexão com o banco de dados, esquema e nome da tabela. 
Erro: {}" ], diff --git a/superset/translations/pt_BR/LC_MESSAGES/messages.po b/superset/translations/pt_BR/LC_MESSAGES/messages.po index cb10cc83502db..2a89d39ea1099 100644 --- a/superset/translations/pt_BR/LC_MESSAGES/messages.po +++ b/superset/translations/pt_BR/LC_MESSAGES/messages.po @@ -801,7 +801,7 @@ msgid "Owners are invalid" msgstr "Donos inválidos" #: superset/commands/exceptions.py:92 -msgid "Datasource does not exist" +msgid "Dataset does not exist" msgstr "Fonte de dados não existe" #: superset/common/query_object.py:301 @@ -2439,7 +2439,7 @@ msgstr "Nome" #: superset/views/base.py:207 #, python-format -msgid "Datasource %(name)s already exists" +msgid "Dataset %(name)s already exists" msgstr "Fonte de dados %(name)s já existe" #: superset/views/base.py:227 diff --git a/superset/translations/ru/LC_MESSAGES/messages.json b/superset/translations/ru/LC_MESSAGES/messages.json index 015b286f44c8e..e3464512a2be3 100644 --- a/superset/translations/ru/LC_MESSAGES/messages.json +++ b/superset/translations/ru/LC_MESSAGES/messages.json @@ -243,7 +243,7 @@ "Charts could not be deleted.": ["Запрос невозможно загрузить"], "Import chart failed for an unknown reason": [""], "Owners are invalid": [""], - "Datasource does not exist": ["Источник данных %(name)s уже существует"], + "Dataset does not exist": ["Источник данных %(name)s уже существует"], "`operation` property of post processing object undefined": [""], "Unsupported post processing operation: %(operation)s": [""], "Adding new datasource [{}]": ["Добавить Источник Данных Druid"], @@ -659,7 +659,7 @@ "Add Annotation Layer": ["Добавить слой аннотации"], "Edit Annotation Layer": ["Добавить слой аннотации"], "Name": ["Название"], - "Datasource %(name)s already exists": [ + "Dataset %(name)s already exists": [ "Источник данных %(name)s уже существует" ], "Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [ diff --git a/superset/translations/ru/LC_MESSAGES/messages.po b/superset/translations/ru/LC_MESSAGES/messages.po index 7b1d509373518..273175665e039 100644 --- a/superset/translations/ru/LC_MESSAGES/messages.po +++ b/superset/translations/ru/LC_MESSAGES/messages.po @@ -782,7 +782,7 @@ msgid "Owners are invalid" msgstr "" #: superset/commands/exceptions.py:92 -msgid "Datasource does not exist" +msgid "Dataset does not exist" msgstr "Источник данных %(name)s уже существует" #: superset/common/query_object.py:301 @@ -2335,7 +2335,7 @@ msgstr "Название" #: superset/views/base.py:207 #, python-format -msgid "Datasource %(name)s already exists" +msgid "Dataset %(name)s already exists" msgstr "Источник данных %(name)s уже существует" #: superset/views/base.py:227 diff --git a/superset/translations/zh/LC_MESSAGES/messages.json b/superset/translations/zh/LC_MESSAGES/messages.json index d9f7e4a78cd1f..231643fc817f7 100644 --- a/superset/translations/zh/LC_MESSAGES/messages.json +++ b/superset/translations/zh/LC_MESSAGES/messages.json @@ -223,7 +223,7 @@ "Charts could not be deleted.": ["这个查询无法被加载"], "Import chart failed for an unknown reason": [""], "Owners are invalid": [""], - "Datasource does not exist": ["数据源%(name)s 已存在"], + "Dataset does not exist": ["数据集不存在"], "`operation` property of post processing object undefined": [""], "Unsupported post processing operation: %(operation)s": [""], "Adding new datasource [{}]": ["添加 Druid 数据源"], @@ -617,7 +617,7 @@ "Add Annotation Layer": ["添加注释层"], "Edit Annotation Layer": ["添加注释层"], "Name": ["名字"], - "Datasource %(name)s already 
exists": ["数据源%(name)s 已存在"], + "Dataset %(name)s already exists": ["数据源%(name)s 已存在"], "Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [ "找不到 [{}] 表,请仔细检查您的数据库连接、Schema 和 表名" ], diff --git a/superset/translations/zh/LC_MESSAGES/messages.po b/superset/translations/zh/LC_MESSAGES/messages.po index acc69d6faaf19..02f864f37aef4 100644 --- a/superset/translations/zh/LC_MESSAGES/messages.po +++ b/superset/translations/zh/LC_MESSAGES/messages.po @@ -773,8 +773,8 @@ msgid "Owners are invalid" msgstr "" #: superset/commands/exceptions.py:92 -msgid "Datasource does not exist" -msgstr "数据源%(name)s 已存在" +msgid "Dataset does not exist" +msgstr "数据集不存在" #: superset/common/query_object.py:301 msgid "`operation` property of post processing object undefined" @@ -2315,7 +2315,7 @@ msgstr "名字" #: superset/views/base.py:207 #, python-format -msgid "Datasource %(name)s already exists" +msgid "Dataset %(name)s already exists" msgstr "数据源%(name)s 已存在" #: superset/views/base.py:227 diff --git a/superset/utils/core.py b/superset/utils/core.py index 3d525b2db9819..313a812d5e562 100644 --- a/superset/utils/core.py +++ b/superset/utils/core.py @@ -71,6 +71,7 @@ from flask_appbuilder import SQLA from flask_appbuilder.security.sqla.models import Role, User from flask_babel import gettext as __ +from flask_babel.speaklater import LazyString from sqlalchemy import event, exc, select, Text from sqlalchemy.dialects.mysql import MEDIUMTEXT from sqlalchemy.engine import Connection, Engine @@ -504,6 +505,8 @@ def base_json_conv( # pylint: disable=inconsistent-return-statements,too-many-r return obj.decode("utf-8") except Exception: # pylint: disable=broad-except return "[bytes]" + if isinstance(obj, LazyString): + return str(obj) def json_iso_dttm_ser(obj: Any, pessimistic: bool = False) -> str: diff --git a/superset/views/base.py b/superset/views/base.py index 1f06731b98fa9..267bdb8ab614f 100644 --- a/superset/views/base.py +++ b/superset/views/base.py @@ -47,6 +47,7 @@ security_manager, ) from superset.connectors.sqla import models +from superset.datasets.commands.exceptions import get_dataset_exist_error_msg from superset.errors import ErrorLevel, SupersetError, SupersetErrorType from superset.exceptions import ( SupersetErrorException, @@ -203,10 +204,6 @@ def wraps(self: "BaseSupersetView", *args: Any, **kwargs: Any) -> FlaskResponse: return functools.update_wrapper(wraps, f) -def get_datasource_exist_error_msg(full_name: str) -> str: - return __("Datasource %(name)s already exists", name=full_name) - - def validate_sqlatable(table: models.SqlaTable) -> None: """Checks the table existence in the database.""" with db.session.no_autoflush: @@ -216,7 +213,7 @@ def validate_sqlatable(table: models.SqlaTable) -> None: models.SqlaTable.database_id == table.database.id, ) if db.session.query(table_query.exists()).scalar(): - raise Exception(get_datasource_exist_error_msg(table.full_name)) + raise Exception(get_dataset_exist_error_msg(table.full_name)) # Fail before adding if the table can't be found try: diff --git a/superset/views/core.py b/superset/views/core.py index 0b717e9803c6b..087ef422b183f 100755 --- a/superset/views/core.py +++ b/superset/views/core.py @@ -59,6 +59,7 @@ viz, ) from superset.charts.dao import ChartDAO +from superset.connectors.base.models import BaseDatasource from superset.connectors.connector_registry import ConnectorRegistry from superset.connectors.sqla.models import ( AnnotationDatasource, @@ -70,6 +71,7 @@ from 
superset.dashboards.dao import DashboardDAO from superset.databases.dao import DatabaseDAO from superset.databases.filters import DatabaseFilter +from superset.datasets.commands.exceptions import DatasetNotFoundError from superset.exceptions import ( CacheLoadError, CertificateException, @@ -293,7 +295,7 @@ def clean_fulfilled_requests(session: Session) -> None: dar.datasource_type, dar.datasource_id, session, ) if not datasource or security_manager.can_access_datasource(datasource): - # datasource does not exist anymore + # Dataset does not exist anymore session.delete(dar) session.commit() @@ -695,50 +697,47 @@ def explore( # pylint: disable=too-many-locals,too-many-return-statements ) } ) - flash(Markup(config["SIP_15_TOAST_MESSAGE"].format(url=url))) - error_redirect = "/chart/list/" try: datasource_id, datasource_type = get_datasource_info( datasource_id, datasource_type, form_data ) - except SupersetException as ex: - flash( - _( - "Error occurred when opening the chart: %(error)s", - error=utils.error_msg_from_exception(ex), - ), - "danger", - ) - return redirect(error_redirect) + except SupersetException: + datasource_id = None + # fall back to table type for unknown datasources + datasource_type = SqlaTable.type - datasource = ConnectorRegistry.get_datasource( - cast(str, datasource_type), datasource_id, db.session - ) - if not datasource: - flash(DATASOURCE_MISSING_ERR, "danger") - return redirect(error_redirect) + datasource: Optional[BaseDatasource] = None + if datasource_id is not None: + try: + datasource = ConnectorRegistry.get_datasource( + cast(str, datasource_type), datasource_id, db.session + ) + except DatasetNotFoundError: + pass + datasource_name = datasource.name if datasource else _("[Missing Dataset]") - if config["ENABLE_ACCESS_REQUEST"] and ( - not security_manager.can_access_datasource(datasource) - ): - flash( - __(security_manager.get_datasource_access_error_msg(datasource)), - "danger", - ) - return redirect( - "superset/request_access/?" - f"datasource_type={datasource_type}&" - f"datasource_id={datasource_id}&" - ) + if datasource: + if config["ENABLE_ACCESS_REQUEST"] and ( + not security_manager.can_access_datasource(datasource) + ): + flash( + __(security_manager.get_datasource_access_error_msg(datasource)), + "danger", + ) + return redirect( + "superset/request_access/?" 
+ f"datasource_type={datasource_type}&" + f"datasource_id={datasource_id}&" + ) - # if feature enabled, run some health check rules for sqla datasource - if hasattr(datasource, "health_check"): - datasource.health_check() + # if feature enabled, run some health check rules for sqla datasource + if hasattr(datasource, "health_check"): + datasource.health_check() viz_type = form_data.get("viz_type") - if not viz_type and datasource.default_endpoint: + if not viz_type and datasource and datasource.default_endpoint: return redirect(datasource.default_endpoint) # slc perms @@ -771,25 +770,31 @@ def explore( # pylint: disable=too-many-locals,too-many-return-statements status=400, ) - if action in ("saveas", "overwrite"): + if action in ("saveas", "overwrite") and datasource: return self.save_or_overwrite_slice( slc, slice_add_perm, slice_overwrite_perm, slice_download_perm, - datasource_id, - cast(str, datasource_type), + datasource.id, + datasource.type, datasource.name, ) standalone = ( request.args.get(utils.ReservedUrlParameters.STANDALONE.value) == "true" ) + dummy_datasource_data: Dict[str, Any] = { + "type": datasource_type, + "name": datasource_name, + "columns": [], + "metrics": [], + } bootstrap_data = { "can_add": slice_add_perm, "can_download": slice_download_perm, "can_overwrite": slice_overwrite_perm, - "datasource": datasource.data, + "datasource": datasource.data if datasource else dummy_datasource_data, "form_data": form_data, "datasource_id": datasource_id, "datasource_type": datasource_type, @@ -799,15 +804,18 @@ def explore( # pylint: disable=too-many-locals,too-many-return-statements "forced_height": request.args.get("height"), "common": common_bootstrap_payload(), } - table_name = ( - datasource.table_name - if datasource_type == "table" - else datasource.datasource_name - ) if slc: title = slc.slice_name - else: + elif datasource: + table_name = ( + datasource.table_name + if datasource_type == "table" + else datasource.datasource_name + ) title = _("Explore - %(table)s", table=table_name) + else: + title = _("Explore") + return self.render_template( "superset/basic.html", bootstrap_data=json.dumps( @@ -1626,6 +1634,7 @@ def warm_up_cache( # pylint: disable=too-many-locals,no-self-use table_name = request.args.get("table_name") db_name = request.args.get("db_name") extra_filters = request.args.get("extra_filters") + slices: List[Slice] = [] if not slice_id and not (table_name and db_name): return json_error_response( diff --git a/superset/views/datasource.py b/superset/views/datasource.py index 5c9a41da41d1c..d4ae9efc1fcf0 100644 --- a/superset/views/datasource.py +++ b/superset/views/datasource.py @@ -20,7 +20,7 @@ from flask import request from flask_appbuilder import expose from flask_appbuilder.security.decorators import has_access_api -from sqlalchemy.orm.exc import NoResultFound +from flask_babel import _ from superset import db from superset.connectors.connector_registry import ConnectorRegistry @@ -42,7 +42,7 @@ class Datasource(BaseSupersetView): def save(self) -> FlaskResponse: data = request.form.get("data") if not isinstance(data, str): - return json_error_response("Request missing data field.", status=500) + return json_error_response(_("Request missing data field."), status=500) datasource_dict = json.loads(data) datasource_id = datasource_dict.get("id") @@ -58,9 +58,7 @@ def save(self) -> FlaskResponse: try: check_ownership(orm_datasource) except SupersetSecurityException: - return json_error_response( - f"{DatasetForbiddenError.message}", 
DatasetForbiddenError.status - ) + raise DatasetForbiddenError() datasource_dict["owners"] = ( db.session.query(orm_datasource.owner_class) @@ -77,7 +75,11 @@ def save(self) -> FlaskResponse: ] if duplicates: return json_error_response( - f"Duplicate column name(s): {','.join(duplicates)}", status=409 + _( + "Duplicate column name(s): %(columns)s", + columns=",".join(duplicates), + ), + status=409, ) orm_datasource.update_from_object(datasource_dict) if hasattr(orm_datasource, "health_check"): @@ -92,17 +94,10 @@ def save(self) -> FlaskResponse: @api @handle_api_exception def get(self, datasource_type: str, datasource_id: int) -> FlaskResponse: - try: - orm_datasource = ConnectorRegistry.get_datasource( - datasource_type, datasource_id, db.session - ) - if not orm_datasource.data: - return json_error_response( - "Error fetching datasource data.", status=500 - ) - return self.json_response(orm_datasource.data) - except NoResultFound: - return json_error_response("This datasource does not exist", status=400) + datasource = ConnectorRegistry.get_datasource( + datasource_type, datasource_id, db.session + ) + return self.json_response(datasource.data) @expose("/external_metadata///") @has_access_api @@ -112,11 +107,11 @@ def external_metadata( self, datasource_type: str, datasource_id: int ) -> FlaskResponse: """Gets column info from the source system""" + datasource = ConnectorRegistry.get_datasource( + datasource_type, datasource_id, db.session + ) try: - datasource = ConnectorRegistry.get_datasource( - datasource_type, datasource_id, db.session - ) external_metadata = datasource.external_metadata() - return self.json_response(external_metadata) except SupersetException as ex: return json_error_response(str(ex), status=400) + return self.json_response(external_metadata) diff --git a/superset/views/utils.py b/superset/views/utils.py index 9db65a2e0aa12..f37d055d847be 100644 --- a/superset/views/utils.py +++ b/superset/views/utils.py @@ -26,7 +26,7 @@ from flask import g, request from flask_appbuilder.security.sqla import models as ab_models from flask_appbuilder.security.sqla.models import User -from flask_babel import gettext as __ +from flask_babel import _ from sqlalchemy.orm.exc import NoResultFound import superset.models.core as models @@ -227,7 +227,7 @@ def get_datasource_info( if not datasource_id: raise SupersetException( - "The dataset associated with this chart no longer exists" + _("The dataset associated with this chart no longer exists") ) datasource_id = int(datasource_id) @@ -489,7 +489,7 @@ def check_datasource_perms( SupersetError( error_type=SupersetErrorType.UNKNOWN_DATASOURCE_TYPE_ERROR, level=ErrorLevel.ERROR, - message=__("Could not determine datasource type"), + message=_("Could not determine datasource type"), ) ) @@ -505,7 +505,7 @@ def check_datasource_perms( SupersetError( error_type=SupersetErrorType.UNKNOWN_DATASOURCE_TYPE_ERROR, level=ErrorLevel.ERROR, - message=__("Could not find viz object"), + message=_("Could not find viz object"), ) ) diff --git a/tests/base_tests.py b/tests/base_tests.py index e494c891757eb..81e218b5c69e7 100644 --- a/tests/base_tests.py +++ b/tests/base_tests.py @@ -18,6 +18,7 @@ """Unit tests for Superset""" import imp import json +from contextlib import contextmanager from typing import Any, Dict, Union, List, Optional from unittest.mock import Mock, patch @@ -26,6 +27,7 @@ from flask import Response from flask_appbuilder.security.sqla import models as ab_models from flask_testing import TestCase +from sqlalchemy.ext.declarative.api 
import DeclarativeMeta from sqlalchemy.orm import Session from tests.test_app import app @@ -495,3 +497,16 @@ def put_assert_metric( else: mock_method.assert_called_once_with("error", func_name) return rv + + +@contextmanager +def db_insert_temp_object(obj: DeclarativeMeta): + """Insert a temporary object in database; delete when done.""" + session = db.session + try: + session.add(obj) + session.commit() + yield obj + finally: + session.delete(obj) + session.commit() diff --git a/tests/charts/api_tests.py b/tests/charts/api_tests.py index 8e22074fe1ba4..94e4d8f3e8c25 100644 --- a/tests/charts/api_tests.py +++ b/tests/charts/api_tests.py @@ -527,8 +527,7 @@ def test_create_chart_validate_datasource(self): "datasource_id": 1, "datasource_type": "unknown", } - uri = f"api/v1/chart/" - rv = self.post_assert_metric(uri, chart_data, "post") + rv = self.post_assert_metric("/api/v1/chart/", chart_data, "post") self.assertEqual(rv.status_code, 400) response = json.loads(rv.data.decode("utf-8")) self.assertEqual( @@ -540,12 +539,11 @@ def test_create_chart_validate_datasource(self): "datasource_id": 0, "datasource_type": "table", } - uri = f"api/v1/chart/" - rv = self.post_assert_metric(uri, chart_data, "post") + rv = self.post_assert_metric("/api/v1/chart/", chart_data, "post") self.assertEqual(rv.status_code, 422) response = json.loads(rv.data.decode("utf-8")) self.assertEqual( - response, {"message": {"datasource_id": ["Datasource does not exist"]}} + response, {"message": {"datasource_id": ["Dataset does not exist"]}} ) @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") @@ -665,25 +663,26 @@ def test_update_chart_validate_datasource(self): Chart API: Test update validate datasource """ admin = self.get_user("admin") - chart = self.insert_chart("title", [admin.id], 1) + chart = self.insert_chart("title", owners=[admin.id], datasource_id=1) self.login(username="admin") + chart_data = {"datasource_id": 1, "datasource_type": "unknown"} - uri = f"api/v1/chart/{chart.id}" - rv = self.put_assert_metric(uri, chart_data, "put") + rv = self.put_assert_metric(f"/api/v1/chart/{chart.id}", chart_data, "put") self.assertEqual(rv.status_code, 400) response = json.loads(rv.data.decode("utf-8")) self.assertEqual( response, {"message": {"datasource_type": ["Must be one of: druid, table, view."]}}, ) + chart_data = {"datasource_id": 0, "datasource_type": "table"} - uri = f"api/v1/chart/{chart.id}" - rv = self.put_assert_metric(uri, chart_data, "put") + rv = self.put_assert_metric(f"/api/v1/chart/{chart.id}", chart_data, "put") self.assertEqual(rv.status_code, 422) response = json.loads(rv.data.decode("utf-8")) self.assertEqual( - response, {"message": {"datasource_id": ["Datasource does not exist"]}} + response, {"message": {"datasource_id": ["Dataset does not exist"]}} ) + db.session.delete(chart) db.session.commit() diff --git a/tests/datasets/api_tests.py b/tests/datasets/api_tests.py index ba1e9996ffb3b..5bcaa90905621 100644 --- a/tests/datasets/api_tests.py +++ b/tests/datasets/api_tests.py @@ -475,12 +475,11 @@ def test_create_dataset_validate_uniqueness(self): "database": energy_usage_ds.database_id, "table_name": energy_usage_ds.table_name, } - uri = "api/v1/dataset/" - rv = self.post_assert_metric(uri, table_data, "post") + rv = self.post_assert_metric("/api/v1/dataset/", table_data, "post") assert rv.status_code == 422 data = json.loads(rv.data.decode("utf-8")) assert data == { - "message": {"table_name": ["Datasource energy_usage already exists"]} + "message": {"table_name": ["Dataset 
energy_usage already exists"]} } def test_create_dataset_same_name_different_schema(self): @@ -838,7 +837,7 @@ def test_update_dataset_item_uniqueness(self): data = json.loads(rv.data.decode("utf-8")) assert rv.status_code == 422 expected_response = { - "message": {"table_name": ["Datasource ab_user already exists"]} + "message": {"table_name": ["Dataset ab_user already exists"]} } assert data == expected_response db.session.delete(dataset) diff --git a/tests/datasource_tests.py b/tests/datasource_tests.py index 290e1351e54d2..ef49640b1f840 100644 --- a/tests/datasource_tests.py +++ b/tests/datasource_tests.py @@ -22,10 +22,11 @@ from superset import app, ConnectorRegistry, db from superset.connectors.sqla.models import SqlaTable +from superset.datasets.commands.exceptions import DatasetNotFoundError from superset.utils.core import get_example_database from tests.fixtures.birth_names_dashboard import load_birth_names_dashboard_with_slices -from .base_tests import SupersetTestCase +from .base_tests import db_insert_temp_object, SupersetTestCase from .fixtures.datasource import datasource_post @@ -72,42 +73,28 @@ def test_external_metadata_for_virtual_table(self): def test_external_metadata_for_malicious_virtual_table(self): self.login(username="admin") - session = db.session table = SqlaTable( table_name="malicious_sql_table", database=get_example_database(), sql="delete table birth_names", ) - session.add(table) - session.commit() - - table = self.get_table_by_name("malicious_sql_table") - url = f"/datasource/external_metadata/table/{table.id}/" - resp = self.get_json_resp(url) - assert "error" in resp - - session.delete(table) - session.commit() + with db_insert_temp_object(table): + url = f"/datasource/external_metadata/table/{table.id}/" + resp = self.get_json_resp(url) + self.assertEqual(resp["error"], "Only `SELECT` statements are allowed") def test_external_metadata_for_mutistatement_virtual_table(self): self.login(username="admin") - session = db.session table = SqlaTable( table_name="multistatement_sql_table", database=get_example_database(), sql="select 123 as intcol, 'abc' as strcol;" "select 123 as intcol, 'abc' as strcol", ) - session.add(table) - session.commit() - - table = self.get_table_by_name("multistatement_sql_table") - url = f"/datasource/external_metadata/table/{table.id}/" - resp = self.get_json_resp(url) - assert "error" in resp - - session.delete(table) - session.commit() + with db_insert_temp_object(table): + url = f"/datasource/external_metadata/table/{table.id}/" + resp = self.get_json_resp(url) + self.assertEqual(resp["error"], "Only single queries supported") def compare_lists(self, l1, l2, key): l2_lookup = {o.get(key): o for o in l2} @@ -251,7 +238,16 @@ def my_check(datasource): del app.config["DATASET_HEALTH_CHECK"] def test_get_datasource_failed(self): + pytest.raises( + DatasetNotFoundError, + lambda: ConnectorRegistry.get_datasource("table", 9999999, db.session), + ) + self.login(username="admin") - url = f"/datasource/get/druid/500000/" - resp = self.get_json_resp(url) - self.assertEqual(resp.get("error"), "This datasource does not exist") + resp = self.get_json_resp("/datasource/get/druid/500000/", raise_on_error=False) + self.assertEqual(resp.get("error"), "Dataset does not exist") + + resp = self.get_json_resp( + "/datasource/get/invalid-datasource-type/500000/", raise_on_error=False + ) + self.assertEqual(resp.get("error"), "Dataset does not exist")