From b820eb82353fedf92aa45f1293ac3dcf4020296d Mon Sep 17 00:00:00 2001
From: "Hugh A. Miles II"
Date: Mon, 13 Mar 2023 17:05:13 -0600
Subject: [PATCH] chore: Update pre-commit packages (#23173)

---
 .pre-commit-config.yaml | 9 +++----
 RELEASING/changelog.py | 2 +-
 .../src/components/ColumnOption.tsx | 8 +++----
 .../src/components/MetricOption.tsx | 8 +++----
 .../src/components/EmptyState/index.tsx | 24 +++++++------------
 .../Table/cell-renderers/NullCell/index.tsx | 8 +++----
 .../FilterControls/FilterControls.tsx | 12 ++++------
 .../FiltersDropdownContent/index.tsx | 10 ++++----
 .../nativeFilters/FilterCard/NameRow.tsx | 18 ++++++--------
 .../src/explore/components/ControlHeader.tsx | 10 ++++----
 .../controls/VizTypeControl/index.tsx | 24 ++++++++-----------
 .../src/views/CRUD/alert/AlertReportModal.tsx | 7 +++---
 .../src/views/components/Menu.tsx | 7 +++---
 .../TimeTable/transformProps.ts | 2 ++
 superset/charts/commands/export.py | 1 -
 superset/charts/schemas.py | 1 -
 superset/cli/test.py | 1 -
 superset/commands/export/assets.py | 1 -
 superset/commands/export/models.py | 1 -
 superset/common/query_object.py | 2 --
 superset/config.py | 1 +
 superset/dashboards/commands/export.py | 1 -
 superset/databases/api.py | 6 ++---
 superset/databases/commands/export.py | 1 -
 .../databases/commands/test_connection.py | 1 -
 superset/databases/schemas.py | 5 ++--
 superset/datasets/commands/export.py | 1 -
 superset/datasource/dao.py | 1 -
 superset/db_engine_specs/aurora.py | 2 --
 superset/db_engine_specs/crate.py | 1 -
 superset/db_engine_specs/databricks.py | 1 -
 superset/db_engine_specs/dremio.py | 1 -
 superset/db_engine_specs/elasticsearch.py | 2 --
 superset/db_engine_specs/gsheets.py | 1 -
 superset/db_engine_specs/hive.py | 3 +--
 superset/db_engine_specs/netezza.py | 1 -
 superset/db_engine_specs/presto.py | 6 ++---
 superset/db_engine_specs/rockset.py | 1 -
 superset/db_engine_specs/snowflake.py | 1 -
 superset/examples/world_bank.py | 1 -
 superset/exceptions.py | 2 +-
 .../2017-10-03_14-37_4736ec66ce19_.py | 5 ----
 ...3c581_allow_multi_schema_metadata_fetch.py | 1 -
 .../2018-03-20_19-47_f231d82b9b26_.py | 2 --
 ...0_16-08_937d04c16b64_update_datasources.py | 2 --
 ...019-10-10_13-52_1495eb914ad3_time_range.py | 1 -
 ...-25_31b2a1039d4a_drop_tables_constraint.py | 1 -
 ...370a_fix_schemas_allowed_for_csv_upload.py | 1 -
 ...5_change_datatype_of_type_in_basecolumn.py | 1 -
 ...acd_rename_to_schemas_allowed_for_file_.py | 1 -
 ..._a9422eeaae74_new_dataset_models_take_2.py | 10 +-------
 superset/models/dashboard.py | 4 ++--
 superset/models/sql_lab.py | 2 --
 .../queries/saved_queries/commands/export.py | 1 -
 superset/reports/commands/base.py | 1 -
 superset/security/manager.py | 1 -
 superset/sql_parse.py | 2 --
 superset/sqllab/exceptions.py | 12 +++++-----
 superset/sqllab/query_render.py | 3 +--
 superset/sqllab/sql_json_executer.py | 3 +--
 superset/tags/models.py | 5 ----
 superset/tasks/async_queries.py | 12 ++++++++--
 superset/utils/celery.py | 1 +
 superset/utils/core.py | 9 ++++---
 .../dashboard_filter_scopes_converter.py | 2 +-
 superset/utils/decorators.py | 2 +-
 superset/utils/machine_auth.py | 2 +-
 superset/views/base_api.py | 5 +---
 superset/views/chart/mixin.py | 1 -
 superset/views/core.py | 1 -
 superset/views/dashboard/mixin.py | 1 -
 superset/viz.py | 1 -
 tests/common/logger_utils.py | 1 -
 .../charts/data/api_tests.py | 3 ---
 tests/integration_tests/cli_tests.py | 1 -
 tests/integration_tests/csv_upload_tests.py | 2 +-
 tests/integration_tests/datasets/api_tests.py | 1 -
 .../db_engine_specs/bigquery_tests.py | 1 +
 .../fixtures/energy_dashboard.py | 1 -
 tests/integration_tests/fixtures/users.py | 1 -
 .../integration_tests/reports/alert_tests.py | 2 --
 tests/integration_tests/reports/api_tests.py | 4 ----
 .../reports/commands_tests.py | 9 -------
 .../reports/scheduler_tests.py | 2 --
 .../security/row_level_security_tests.py | 1 -
 tests/integration_tests/thumbnails_tests.py | 1 -
 .../pandas_postprocessing/test_flatten.py | 8 ++++++-
 87 files changed, 112 insertions(+), 218 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index aa0cf4af62d1a..562652aa15842 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -20,16 +20,17 @@ repos:
     hooks:
       - id: isort
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v0.941
+    rev: v1.0.1
     hooks:
       - id: mypy
+        args: [--check-untyped-defs]
         additional_dependencies: [types-all]
   - repo: https://github.com/peterdemin/pip-compile-multi
-    rev: v2.4.1
+    rev: v2.6.2
     hooks:
       - id: pip-compile-multi-verify
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v3.2.0
+    rev: v4.4.0
     hooks:
       - id: check-docstring-first
       - id: check-added-large-files
@@ -41,7 +42,7 @@ repos:
       - id: trailing-whitespace
        args: ["--markdown-linebreak-ext=md"]
   - repo: https://github.com/psf/black
-    rev: 22.3.0
+    rev: 23.1.0
     hooks:
       - id: black
         language_version: python3
diff --git a/RELEASING/changelog.py b/RELEASING/changelog.py
index 0729853ba57e9..68a54e10be360 100644
--- a/RELEASING/changelog.py
+++ b/RELEASING/changelog.py
@@ -138,7 +138,7 @@ def _get_pull_request_details(self, git_log: GitLog) -> Dict[str, Any]:
         title = pr_info.title if pr_info else git_log.message
         pr_type = re.match(SUPERSET_PULL_REQUEST_TYPES, title)
         if pr_type:
-            pr_type = pr_type.group().strip('"')
+            pr_type = pr_type.group().strip('"')  # type: ignore
 
         labels = (" | ").join([label.name for label in pr_info.labels])
         is_risky = self._is_risk_pull_request(pr_info.labels)
diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/components/ColumnOption.tsx b/superset-frontend/packages/superset-ui-chart-controls/src/components/ColumnOption.tsx
index fce2e8ff2ad07..074d816c829d7 100644
--- a/superset-frontend/packages/superset-ui-chart-controls/src/components/ColumnOption.tsx
+++ b/superset-frontend/packages/superset-ui-chart-controls/src/components/ColumnOption.tsx
@@ -59,11 +59,9 @@ export function ColumnOption({
-        css`
-          margin-right: ${theme.gridUnit}px;
-        `
-      }
+      css={(theme: SupersetTheme) => css`
+        margin-right: ${theme.gridUnit}px;
+      `}
       ref={labelRef}
     >
       {getColumnLabelText(column)}
diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/components/MetricOption.tsx b/superset-frontend/packages/superset-ui-chart-controls/src/components/MetricOption.tsx
index 9ee391aba5c11..fe2ac9750aa0a 100644
--- a/superset-frontend/packages/superset-ui-chart-controls/src/components/MetricOption.tsx
+++ b/superset-frontend/packages/superset-ui-chart-controls/src/components/MetricOption.tsx
@@ -71,11 +71,9 @@ export function MetricOption({
   const label = (
-        css`
-          margin-right: ${theme.gridUnit}px;
-        `
-      }
+      css={(theme: SupersetTheme) => css`
+        margin-right: ${theme.gridUnit}px;
+      `}
       ref={labelRef}
     >
       {link}
diff --git a/superset-frontend/src/components/EmptyState/index.tsx b/superset-frontend/src/components/EmptyState/index.tsx
index b8230c8fcf246..95c454b0ae130 100644
--- a/superset-frontend/src/components/EmptyState/index.tsx
+++ b/superset-frontend/src/components/EmptyState/index.tsx
@@ -158,11 +158,9 @@ export const EmptyStateBig = ({
     {image && }
-        css`
-          max-width: ${theme.gridUnit * 150}px;
-        `
-      }
+      css={(theme: SupersetTheme) => css`
+        max-width: ${theme.gridUnit * 150}px;
+      `}
     >
       {title}
       {description && {description}}
@@ -189,11 +187,9 @@ export const EmptyStateMedium = ({
     {image && }
-        css`
-          max-width: ${theme.gridUnit * 100}px;
-        `
-      }
+      css={(theme: SupersetTheme) => css`
+        max-width: ${theme.gridUnit * 100}px;
+      `}
     >
       {title}
       {description && {description}}
@@ -218,11 +214,9 @@ export const EmptyStateSmall = ({
     {image && }
-        css`
-          max-width: ${theme.gridUnit * 75}px;
-        `
-      }
+      css={(theme: SupersetTheme) => css`
+        max-width: ${theme.gridUnit * 75}px;
+      `}
     >
       {title}
       {description && {description}}
diff --git a/superset-frontend/src/components/Table/cell-renderers/NullCell/index.tsx b/superset-frontend/src/components/Table/cell-renderers/NullCell/index.tsx
index f1c9139fd9f0d..36601d3b94c4f 100644
--- a/superset-frontend/src/components/Table/cell-renderers/NullCell/index.tsx
+++ b/superset-frontend/src/components/Table/cell-renderers/NullCell/index.tsx
@@ -23,11 +23,9 @@ import { NULL_DISPLAY } from 'src/constants';
 function NullCell() {
   return (
-        css`
-          color: ${theme.colors.grayscale.light1};
-        `
-      }
+      css={(theme: SupersetTheme) => css`
+        color: ${theme.colors.grayscale.light1};
+      `}
     >
       {NULL_DISPLAY}
diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterControls.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterControls.tsx
index b44591f4b1a9c..831d332c25111 100644
--- a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterControls.tsx
+++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterControls.tsx
@@ -226,13 +226,11 @@ const FilterControls: FC = ({
   const renderHorizontalContent = () => (
-        css`
-          padding: 0 ${theme.gridUnit * 4}px;
-          min-width: 0;
-          flex: 1;
-        `
-      }
+      css={(theme: SupersetTheme) => css`
+        padding: 0 ${theme.gridUnit * 4}px;
+        min-width: 0;
+        flex: 1;
+      `}
     >
 (
-        css`
-          width: ${theme.gridUnit * 56}px;
-          padding: ${theme.gridUnit}px 0;
-        `
-      }
+      css={(theme: SupersetTheme) => css`
+        width: ${theme.gridUnit * 56}px;
+        padding: ${theme.gridUnit}px 0;
+      `}
     >
       {overflowedCrossFilters.map(crossFilter =>
         rendererCrossFilter(
diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterCard/NameRow.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FilterCard/NameRow.tsx
index 6c7e82b15ddb4..6f42b05e08f79 100644
--- a/superset-frontend/src/dashboard/components/nativeFilters/FilterCard/NameRow.tsx
+++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterCard/NameRow.tsx
@@ -44,20 +44,16 @@ export const NameRow = ({
   return (
-        css`
-          margin-bottom: ${theme.gridUnit * 3}px;
-          justify-content: space-between;
-        `
-      }
+      css={(theme: SupersetTheme) => css`
+        margin-bottom: ${theme.gridUnit * 3}px;
+        justify-content: space-between;
+      `}
     >
-        css`
-          margin-right: ${theme.gridUnit}px;
-        `
-      }
+        css={(theme: SupersetTheme) => css`
+          margin-right: ${theme.gridUnit}px;
+        `}
       />
       {filter.name}
diff --git a/superset-frontend/src/explore/components/ControlHeader.tsx b/superset-frontend/src/explore/components/ControlHeader.tsx
index 3bc16d94a5336..9633fba6139a1 100644
--- a/superset-frontend/src/explore/components/ControlHeader.tsx
+++ b/superset-frontend/src/explore/components/ControlHeader.tsx
@@ -134,12 +134,10 @@ const ControlHeader: FC = ({
-        css`
-          margin-bottom: ${theme.gridUnit * 0.5}px;
-          position: relative;
-        `
-      }
+      css={(theme: SupersetTheme) => css`
+        margin-bottom: ${theme.gridUnit * 0.5}px;
+        position: relative;
+      `}
     >
       {leftNode && {leftNode}}
-        css`
-          margin-top: ${theme.gridUnit}px;
-        `
-      }
+      css={(theme: SupersetTheme) => css`
+        margin-top: ${theme.gridUnit}px;
+      `}
     >
       {' '}
       {t('This visualization type is not supported.')}
@@ -114,15 +112,13 @@
     {initialValue && }
-        css`
-          display: flex;
-          justify-content: flex-end;
-          margin-top: ${theme.gridUnit * 3}px;
-          color: ${theme.colors.grayscale.base};
-          text-decoration: underline;
-        `
-      }
+      css={(theme: SupersetTheme) => css`
+        display: flex;
+        justify-content: flex-end;
+        margin-top: ${theme.gridUnit * 3}px;
+        color: ${theme.colors.grayscale.base};
+        text-decoration: underline;
+      `}
     >
       {t('View all charts')}
diff --git a/superset-frontend/src/views/CRUD/alert/AlertReportModal.tsx b/superset-frontend/src/views/CRUD/alert/AlertReportModal.tsx
index d7f7a4c7364c3..302f9afdd9965 100644
--- a/superset-frontend/src/views/CRUD/alert/AlertReportModal.tsx
+++ b/superset-frontend/src/views/CRUD/alert/AlertReportModal.tsx
@@ -371,10 +371,9 @@ const timezoneHeaderStyle = (theme: SupersetTheme) => css`
   margin: ${theme.gridUnit * 3}px 0;
 `;
 
-const inputSpacer = (theme: SupersetTheme) =>
-  css`
-    margin-right: ${theme.gridUnit * 3}px;
-  `;
+const inputSpacer = (theme: SupersetTheme) => css`
+  margin-right: ${theme.gridUnit * 3}px;
+`;
 
 type NotificationAddStatus = 'active' | 'disabled' | 'hidden';
diff --git a/superset-frontend/src/views/components/Menu.tsx b/superset-frontend/src/views/components/Menu.tsx
index 652ae63fd91ce..92766cfdda3a5 100644
--- a/superset-frontend/src/views/components/Menu.tsx
+++ b/superset-frontend/src/views/components/Menu.tsx
@@ -59,9 +59,10 @@ const StyledHeader = styled.header`
     justify-content: center;
     /* must be exactly the height of the Antd navbar */
     min-height: 50px;
-    padding: ${theme.gridUnit}px ${theme.gridUnit * 2}px ${
-      theme.gridUnit
-    }px ${theme.gridUnit * 4}px;
+    padding: ${theme.gridUnit}px
+      ${theme.gridUnit * 2}px
+      ${theme.gridUnit}px
+      ${theme.gridUnit * 4}px;
     max-width: ${theme.gridUnit * theme.brandIconMaxWidth}px;
     img {
       height: 100%;
diff --git a/superset-frontend/src/visualizations/TimeTable/transformProps.ts b/superset-frontend/src/visualizations/TimeTable/transformProps.ts
index f408cc21869da..951042544190b 100644
--- a/superset-frontend/src/visualizations/TimeTable/transformProps.ts
+++ b/superset-frontend/src/visualizations/TimeTable/transformProps.ts
@@ -55,11 +55,13 @@ export default function transformProps(chartProps: TableChartProps) {
       typeof column === 'object' ? column : { label: column },
     );
   } else {
+    /* eslint-disable */
     const metricMap = datasource.metrics.reduce((acc, current) => {
       const map = acc;
       map[current.metric_name] = current;
       return map;
     }, {} as Record);
+    /* eslint-disable */
     rows = metrics.map(metric =>
       typeof metric === 'object' ? metric : metricMap[metric],
     );
diff --git a/superset/charts/commands/export.py b/superset/charts/commands/export.py
index 39c3c7d46a77b..ebf660ac6d733 100644
--- a/superset/charts/commands/export.py
+++ b/superset/charts/commands/export.py
@@ -38,7 +38,6 @@
 class ExportChartsCommand(ExportModelsCommand):
-
     dao = ChartDAO
     not_found = ChartNotFoundError
diff --git a/superset/charts/schemas.py b/superset/charts/schemas.py
index 3ee1b6f1f4ec6..5c345b8ea2f0a 100644
--- a/superset/charts/schemas.py
+++ b/superset/charts/schemas.py
@@ -841,7 +841,6 @@ class ChartDataFilterSchema(Schema):
 
 class ChartDataExtrasSchema(Schema):
-
     relative_start = fields.String(
         description="Start time for relative time deltas. "
         'Default: `config["DEFAULT_RELATIVE_START_TIME"]`',
diff --git a/superset/cli/test.py b/superset/cli/test.py
index db065287b008b..f175acec470cd 100755
--- a/superset/cli/test.py
+++ b/superset/cli/test.py
@@ -45,7 +45,6 @@ def load_test_users_run() -> None:
     Syncs permissions for those users/roles
     """
     if app.config["TESTING"]:
-
         sm = security_manager
 
         examples_db = database_utils.get_example_database()
diff --git a/superset/commands/export/assets.py b/superset/commands/export/assets.py
index 8711cac4dd01c..9f088af428d04 100644
--- a/superset/commands/export/assets.py
+++ b/superset/commands/export/assets.py
@@ -37,7 +37,6 @@ class ExportAssetsCommand(BaseCommand):
     """
 
     def run(self) -> Iterator[Tuple[str, str]]:
-
         metadata = {
             "version": EXPORT_VERSION,
             "type": "assets",
diff --git a/superset/commands/export/models.py b/superset/commands/export/models.py
index dd4ff3bc57172..4edafaa7464d0 100644
--- a/superset/commands/export/models.py
+++ b/superset/commands/export/models.py
@@ -30,7 +30,6 @@
 class ExportModelsCommand(BaseCommand):
-
     dao: Type[BaseDAO] = BaseDAO
     not_found: Type[CommandException] = CommandException
diff --git a/superset/common/query_object.py b/superset/common/query_object.py
index 70f50c3e77603..ebed4e040fc0c 100644
--- a/superset/common/query_object.py
+++ b/superset/common/query_object.py
@@ -403,11 +403,9 @@ def cache_key(self, **extra: Any) -> str:
             and hasattr(self.datasource, "database")
             and self.datasource.database.impersonate_user
         ):
-
             if key := self.datasource.database.db_engine_spec.get_impersonation_key(
                 getattr(g, "user", None)
             ):
-
                 logger.debug(
                     "Adding impersonation key to QueryObject cache dict: %s", key
                 )
diff --git a/superset/config.py b/superset/config.py
index ae6c1002634e2..9643f30883480 100644
--- a/superset/config.py
+++ b/superset/config.py
@@ -1204,6 +1204,7 @@ def SQL_QUERY_MUTATOR(  # pylint: disable=invalid-name,unused-argument
 # functionality for both the SQL_Lab and Charts.
 MUTATE_AFTER_SPLIT = False
 
+
 # This allows for a user to add header data to any outgoing emails. For example,
 # if you need to include metadata in the header or you want to change the specifications
 # of the email title, header, or sender.
diff --git a/superset/dashboards/commands/export.py b/superset/dashboards/commands/export.py
index c175556943874..886b84ffa6db0 100644
--- a/superset/dashboards/commands/export.py
+++ b/superset/dashboards/commands/export.py
@@ -102,7 +102,6 @@ def append_charts(position: Dict[str, Any], charts: Set[Slice]) -> Dict[str, Any
 
 class ExportDashboardsCommand(ExportModelsCommand):
-
     dao = DashboardDAO
     not_found = DashboardNotFoundError
diff --git a/superset/databases/api.py b/superset/databases/api.py
index 5bda161a3a8b8..f9be5e7e97a5a 100644
--- a/superset/databases/api.py
+++ b/superset/databases/api.py
@@ -1308,12 +1308,10 @@ def available(self) -> Response:
                 and hasattr(engine_spec, "sqlalchemy_uri_placeholder")
                 and getattr(engine_spec, "default_driver") in drivers
             ):
-                payload[
-                    "parameters"
-                ] = engine_spec.parameters_json_schema()  # type: ignore
+                payload["parameters"] = engine_spec.parameters_json_schema()
                 payload[
                     "sqlalchemy_uri_placeholder"
-                ] = engine_spec.sqlalchemy_uri_placeholder  # type: ignore
+                ] = engine_spec.sqlalchemy_uri_placeholder
 
                 available_databases.append(payload)
diff --git a/superset/databases/commands/export.py b/superset/databases/commands/export.py
index acb794531deba..e1f8fc2a25165 100644
--- a/superset/databases/commands/export.py
+++ b/superset/databases/commands/export.py
@@ -51,7 +51,6 @@ def parse_extra(extra_payload: str) -> Dict[str, Any]:
 
 class ExportDatabasesCommand(ExportModelsCommand):
-
     dao = DatabaseDAO
     not_found = DatabaseNotFoundError
diff --git a/superset/databases/commands/test_connection.py b/superset/databases/commands/test_connection.py
index c5e7dc48f9831..cbc1240905a10 100644
--- a/superset/databases/commands/test_connection.py
+++ b/superset/databases/commands/test_connection.py
@@ -200,7 +200,6 @@ def ping(engine: Engine) -> bool:
             )
             raise DatabaseSecurityUnsafeError(message=str(ex)) from ex
         except SupersetTimeoutException as ex:
-
             event_logger.log_with_context(
                 action=get_log_connection_action(
                     "test_connection_error", ssh_tunnel, ex
diff --git a/superset/databases/schemas.py b/superset/databases/schemas.py
index 234209f17305c..288408969d32e 100644
--- a/superset/databases/schemas.py
+++ b/superset/databases/schemas.py
@@ -303,7 +303,7 @@ def build_sqlalchemy_uri(
     )
 
     # validate parameters
-    parameters = engine_spec.parameters_schema.load(parameters)  # type: ignore
+    parameters = engine_spec.parameters_schema.load(parameters)
 
     serialized_encrypted_extra = data.get("masked_encrypted_extra") or "{}"
     try:
@@ -311,7 +311,7 @@ def build_sqlalchemy_uri(
     except json.decoder.JSONDecodeError:
         encrypted_extra = {}
 
-    data["sqlalchemy_uri"] = engine_spec.build_sqlalchemy_uri(  # type: ignore
+    data["sqlalchemy_uri"] = engine_spec.build_sqlalchemy_uri(
         parameters,
         encrypted_extra,
     )
@@ -488,7 +488,6 @@ class Meta:  # pylint: disable=too-few-public-methods
 
 class DatabaseTestConnectionSchema(Schema, DatabaseParametersSchemaMixin):
-
     rename_encrypted_extra = pre_load(rename_encrypted_extra)
 
     database_name = fields.String(
diff --git a/superset/datasets/commands/export.py b/superset/datasets/commands/export.py
index cc6dad5d255be..c6fe43c89df33 100644
--- a/superset/datasets/commands/export.py
+++ b/superset/datasets/commands/export.py
@@ -37,7 +37,6 @@
 class ExportDatasetsCommand(ExportModelsCommand):
-
     dao = DatasetDAO
     not_found = DatasetNotFoundError
diff --git a/superset/datasource/dao.py b/superset/datasource/dao.py
index c8df4c8d8d968..158a32c7fdc6f 100644
--- a/superset/datasource/dao.py
+++ b/superset/datasource/dao.py
@@ -34,7 +34,6 @@
 class DatasourceDAO(BaseDAO):
-
     sources: Dict[Union[DatasourceType, str], Type[Datasource]] = {
         DatasourceType.TABLE: SqlaTable,
         DatasourceType.QUERY: Query,
diff --git a/superset/db_engine_specs/aurora.py b/superset/db_engine_specs/aurora.py
index b3be750ed9df9..0baaf1e9b1249 100644
--- a/superset/db_engine_specs/aurora.py
+++ b/superset/db_engine_specs/aurora.py
@@ -19,7 +19,6 @@
 class AuroraMySQLDataAPI(MySQLEngineSpec):
-
     engine = "mysql"
     default_driver = "auroradataapi"
     engine_name = "Aurora MySQL (Data API)"
@@ -33,7 +32,6 @@ class AuroraMySQLDataAPI(MySQLEngineSpec):
 
 class AuroraPostgresDataAPI(PostgresEngineSpec):
-
     engine = "postgresql"
     default_driver = "auroradataapi"
     engine_name = "Aurora PostgreSQL (Data API)"
diff --git a/superset/db_engine_specs/crate.py b/superset/db_engine_specs/crate.py
index 7cf7bed15d9c9..6eafae829edda 100644
--- a/superset/db_engine_specs/crate.py
+++ b/superset/db_engine_specs/crate.py
@@ -28,7 +28,6 @@
 class CrateEngineSpec(BaseEngineSpec):
-
     engine = "crate"
     engine_name = "CrateDB"
diff --git a/superset/db_engine_specs/databricks.py b/superset/db_engine_specs/databricks.py
index df82b6d2f93e9..a3650523b2cba 100644
--- a/superset/db_engine_specs/databricks.py
+++ b/superset/db_engine_specs/databricks.py
@@ -192,7 +192,6 @@ def get_table_names(
     def build_sqlalchemy_uri(  # type: ignore
         cls, parameters: DatabricksParametersType, *_
     ) -> str:
-
         query = {}
         if parameters.get("encryption"):
             if not cls.encryption_parameters:
diff --git a/superset/db_engine_specs/dremio.py b/superset/db_engine_specs/dremio.py
index 0c773e70938f4..08a10f12a225c 100644
--- a/superset/db_engine_specs/dremio.py
+++ b/superset/db_engine_specs/dremio.py
@@ -23,7 +23,6 @@
 class DremioEngineSpec(BaseEngineSpec):
-
     engine = "dremio"
     engine_name = "Dremio"
diff --git a/superset/db_engine_specs/elasticsearch.py b/superset/db_engine_specs/elasticsearch.py
index b47a61d0ccb70..c96d0b36a7e87 100644
--- a/superset/db_engine_specs/elasticsearch.py
+++ b/superset/db_engine_specs/elasticsearch.py
@@ -67,7 +67,6 @@ def get_dbapi_exception_mapping(cls) -> Dict[Type[Exception], Type[Exception]]:
     def convert_dttm(
         cls, target_type: str, dttm: datetime, db_extra: Optional[Dict[str, Any]] = None
     ) -> Optional[str]:
-
         db_extra = db_extra or {}
 
         sqla_type = cls.get_sqla_column_type(target_type)
@@ -99,7 +98,6 @@ def convert_dttm(
 
 class OpenDistroEngineSpec(BaseEngineSpec):  # pylint: disable=abstract-method
-
     time_groupby_inline = True
     time_secondary_columns = True
     allows_joins = False
diff --git a/superset/db_engine_specs/gsheets.py b/superset/db_engine_specs/gsheets.py
index c181ae62254de..17156f5a9efcb 100644
--- a/superset/db_engine_specs/gsheets.py
+++ b/superset/db_engine_specs/gsheets.py
@@ -257,7 +257,6 @@ def validate_parameters(
         idx = 0
 
         for name, url in table_catalog.items():
-
             if not name:
                 errors.append(
                     SupersetError(
diff --git a/superset/db_engine_specs/hive.py b/superset/db_engine_specs/hive.py
index 63c531e843edf..a5cbfdb792fff 100644
--- a/superset/db_engine_specs/hive.py
+++ b/superset/db_engine_specs/hive.py
@@ -191,7 +191,6 @@ def df_to_sql(
             raise SupersetException("Append operation not currently supported")
 
         if to_sql_kwargs["if_exists"] == "fail":
-
             # Ensure table doesn't already exist.
             if table.schema:
                 table_exists = not database.get_df(
@@ -425,7 +424,7 @@ def _get_fields(cls, cols: List[Dict[str, Any]]) -> List[ColumnClause]:
         return BaseEngineSpec._get_fields(cols)  # pylint: disable=protected-access
 
     @classmethod
-    def latest_sub_partition(
+    def latest_sub_partition(  # type: ignore
         cls, table_name: str, schema: Optional[str], database: "Database", **kwargs: Any
     ) -> str:
         # TODO(bogdan): implement`
diff --git a/superset/db_engine_specs/netezza.py b/superset/db_engine_specs/netezza.py
index 9d3b5ec180c7b..6f336b96bc3f7 100644
--- a/superset/db_engine_specs/netezza.py
+++ b/superset/db_engine_specs/netezza.py
@@ -18,7 +18,6 @@
 class NetezzaEngineSpec(PostgresBaseEngineSpec):
-
     engine = "netezza"
     default_driver = "nzpy"
     engine_name = "IBM Netezza Performance Server"
diff --git a/superset/db_engine_specs/presto.py b/superset/db_engine_specs/presto.py
index 72931a85b420c..5a914631578a0 100644
--- a/superset/db_engine_specs/presto.py
+++ b/superset/db_engine_specs/presto.py
@@ -1267,10 +1267,10 @@ def handle_cursor(cls, cursor: "Cursor", query: Query, session: Session) -> None
     def _extract_error_message(cls, ex: Exception) -> str:
         if (
             hasattr(ex, "orig")
-            and type(ex.orig).__name__ == "DatabaseError"  # type: ignore
-            and isinstance(ex.orig[0], dict)  # type: ignore
+            and type(ex.orig).__name__ == "DatabaseError"
+            and isinstance(ex.orig[0], dict)
         ):
-            error_dict = ex.orig[0]  # type: ignore
+            error_dict = ex.orig[0]
             return "{} at {}: {}".format(
                 error_dict.get("errorName"),
                 error_dict.get("errorLocation"),
diff --git a/superset/db_engine_specs/rockset.py b/superset/db_engine_specs/rockset.py
index 3778c527560f6..cc215054be5f7 100644
--- a/superset/db_engine_specs/rockset.py
+++ b/superset/db_engine_specs/rockset.py
@@ -26,7 +26,6 @@
 class RocksetEngineSpec(BaseEngineSpec):
-
     engine = "rockset"
     engine_name = "Rockset"
diff --git a/superset/db_engine_specs/snowflake.py b/superset/db_engine_specs/snowflake.py
index 419e0a0655fe0..2b7c14d2666af 100644
--- a/superset/db_engine_specs/snowflake.py
+++ b/superset/db_engine_specs/snowflake.py
@@ -222,7 +222,6 @@ def build_sqlalchemy_uri(
             Dict[str, Any]
         ] = None,
     ) -> str:
-
         return str(
             URL(
                 "snowflake",
diff --git a/superset/examples/world_bank.py b/superset/examples/world_bank.py
index b65ad68d1af62..2972188e0267e 100644
--- a/superset/examples/world_bank.py
+++ b/superset/examples/world_bank.py
@@ -52,7 +52,6 @@ def load_world_bank_health_n_pop(  # pylint: disable=too-many-locals, too-many-s
     tbl_name = "wb_health_population"
     database = superset.utils.database.get_example_database()
     with database.get_sqla_engine_with_context() as engine:
-
         schema = inspect(engine).default_schema_name
         table_exists = database.has_table_by_name(tbl_name)
diff --git a/superset/exceptions.py b/superset/exceptions.py
index cee15be376394..32b06203cdb1e 100644
--- a/superset/exceptions.py
+++ b/superset/exceptions.py
@@ -54,7 +54,7 @@ def to_dict(self) -> Dict[str, Any]:
         if self.error_type:
             rv["error_type"] = self.error_type
         if self.exception is not None and hasattr(self.exception, "to_dict"):
-            rv = {**rv, **self.exception.to_dict()}  # type: ignore
+            rv = {**rv, **self.exception.to_dict()}
         return rv
diff --git a/superset/migrations/versions/2017-10-03_14-37_4736ec66ce19_.py b/superset/migrations/versions/2017-10-03_14-37_4736ec66ce19_.py
index 7fe94f3c8a9c4..26cfb93b991ca 100644
--- a/superset/migrations/versions/2017-10-03_14-37_4736ec66ce19_.py
+++ b/superset/migrations/versions/2017-10-03_14-37_4736ec66ce19_.py
@@ -65,7 +65,6 @@ def upgrade():
     # datasources.datasource_name column.
     for foreign in ["columns", "metrics"]:
         with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:
-
             # Add the datasource_id column with the relevant constraints.
             batch_op.add_column(sa.Column("datasource_id", sa.Integer))
@@ -94,7 +93,6 @@ def upgrade():
         )
 
         with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:
-
             # Drop the datasource_name column and associated constraints. Note
             # due to prior revisions (1226819ee0e3, 3b626e2a6783) there may
             # incorrectly be multiple duplicate constraints.
@@ -146,7 +144,6 @@ def downgrade():
     # datasources.datasource_id column.
     for foreign in ["columns", "metrics"]:
         with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:
-
             # Add the datasource_name column with the relevant constraints.
             batch_op.add_column(sa.Column("datasource_name", sa.String(255)))
@@ -175,7 +172,6 @@ def downgrade():
         )
 
         with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:
-
             # Drop the datasource_id column and associated constraint.
             batch_op.drop_constraint(
                 "fk_{}_datasource_id_datasources".format(foreign), type_="foreignkey"
@@ -184,7 +180,6 @@ def downgrade():
             batch_op.drop_column("datasource_id")
 
     with op.batch_alter_table("datasources", naming_convention=conv) as batch_op:
-
         # Prior to dropping the uniqueness constraint, the foreign key
         # associated with the cluster_name column needs to be dropped.
         batch_op.drop_constraint(
diff --git a/superset/migrations/versions/2018-03-06_12-24_e68c4473c581_allow_multi_schema_metadata_fetch.py b/superset/migrations/versions/2018-03-06_12-24_e68c4473c581_allow_multi_schema_metadata_fetch.py
index 8e4284d9421a8..73eb4f95e96c9 100644
--- a/superset/migrations/versions/2018-03-06_12-24_e68c4473c581_allow_multi_schema_metadata_fetch.py
+++ b/superset/migrations/versions/2018-03-06_12-24_e68c4473c581_allow_multi_schema_metadata_fetch.py
@@ -30,7 +30,6 @@
 def upgrade():
-
     op.add_column(
         "dbs",
         sa.Column(
diff --git a/superset/migrations/versions/2018-03-20_19-47_f231d82b9b26_.py b/superset/migrations/versions/2018-03-20_19-47_f231d82b9b26_.py
index 1eecfd726415d..dbe3f0ace4220 100644
--- a/superset/migrations/versions/2018-03-20_19-47_f231d82b9b26_.py
+++ b/superset/migrations/versions/2018-03-20_19-47_f231d82b9b26_.py
@@ -36,7 +36,6 @@
 def upgrade():
-
     # Reduce the size of the metric_name column for constraint viability.
     with op.batch_alter_table("metrics", naming_convention=conv) as batch_op:
         batch_op.alter_column(
@@ -55,7 +54,6 @@ def upgrade():
 
 def downgrade():
-
     bind = op.get_bind()
     insp = sa.engine.reflection.Inspector.from_engine(bind)
diff --git a/superset/migrations/versions/2018-07-20_16-08_937d04c16b64_update_datasources.py b/superset/migrations/versions/2018-07-20_16-08_937d04c16b64_update_datasources.py
index 5a99d7af529a4..98c14f4570fb0 100644
--- a/superset/migrations/versions/2018-07-20_16-08_937d04c16b64_update_datasources.py
+++ b/superset/migrations/versions/2018-07-20_16-08_937d04c16b64_update_datasources.py
@@ -31,7 +31,6 @@
 def upgrade():
-
     # Enforce that the datasource_name column be non-nullable.
     with op.batch_alter_table("datasources") as batch_op:
         batch_op.alter_column(
@@ -40,7 +39,6 @@ def upgrade():
 
 def downgrade():
-
     # Forego that the datasource_name column be non-nullable.
     with op.batch_alter_table("datasources") as batch_op:
         batch_op.alter_column(
diff --git a/superset/migrations/versions/2019-10-10_13-52_1495eb914ad3_time_range.py b/superset/migrations/versions/2019-10-10_13-52_1495eb914ad3_time_range.py
index 3b39d6b1abe7f..374dcc672ae6f 100644
--- a/superset/migrations/versions/2019-10-10_13-52_1495eb914ad3_time_range.py
+++ b/superset/migrations/versions/2019-10-10_13-52_1495eb914ad3_time_range.py
@@ -69,7 +69,6 @@ def downgrade():
         form_data = json.loads(slc.params)
 
         if "time_range" in form_data:
-
             # Note defaults and relative dates are not compatible with since/until
             # and thus the time range is persisted.
             try:
diff --git a/superset/migrations/versions/2021-07-27_08-25_31b2a1039d4a_drop_tables_constraint.py b/superset/migrations/versions/2021-07-27_08-25_31b2a1039d4a_drop_tables_constraint.py
index 02123d73ef5d4..8f07ba1ae3d8c 100644
--- a/superset/migrations/versions/2021-07-27_08-25_31b2a1039d4a_drop_tables_constraint.py
+++ b/superset/migrations/versions/2021-07-27_08-25_31b2a1039d4a_drop_tables_constraint.py
@@ -48,7 +48,6 @@ def upgrade():
 
 def downgrade():
-
     # One cannot simply re-add the uniqueness constraint as it may not have previously
     # existed.
     pass
diff --git a/superset/migrations/versions/2021-08-02_16-39_e323605f370a_fix_schemas_allowed_for_csv_upload.py b/superset/migrations/versions/2021-08-02_16-39_e323605f370a_fix_schemas_allowed_for_csv_upload.py
index eb5d1b424293f..24527472674dc 100644
--- a/superset/migrations/versions/2021-08-02_16-39_e323605f370a_fix_schemas_allowed_for_csv_upload.py
+++ b/superset/migrations/versions/2021-08-02_16-39_e323605f370a_fix_schemas_allowed_for_csv_upload.py
@@ -39,7 +39,6 @@
 class Database(Base):
-
     __tablename__ = "dbs"
     id = Column(Integer, primary_key=True)
     extra = Column(Text)
diff --git a/superset/migrations/versions/2021-11-02_17-44_3ba29ecbaac5_change_datatype_of_type_in_basecolumn.py b/superset/migrations/versions/2021-11-02_17-44_3ba29ecbaac5_change_datatype_of_type_in_basecolumn.py
index 4f94a4bb9beac..3488650bf1b7c 100644
--- a/superset/migrations/versions/2021-11-02_17-44_3ba29ecbaac5_change_datatype_of_type_in_basecolumn.py
+++ b/superset/migrations/versions/2021-11-02_17-44_3ba29ecbaac5_change_datatype_of_type_in_basecolumn.py
@@ -31,7 +31,6 @@
 def upgrade():
-
     with op.batch_alter_table("table_columns") as batch_op:
         batch_op.alter_column(
             "type", existing_type=sa.VARCHAR(length=32), type_=sa.TEXT()
diff --git a/superset/migrations/versions/2021-11-11_04-18_0ca9e5f1dacd_rename_to_schemas_allowed_for_file_.py b/superset/migrations/versions/2021-11-11_04-18_0ca9e5f1dacd_rename_to_schemas_allowed_for_file_.py
index 5a2a4b94f4208..629c29ecd7d7a 100644
--- a/superset/migrations/versions/2021-11-11_04-18_0ca9e5f1dacd_rename_to_schemas_allowed_for_file_.py
+++ b/superset/migrations/versions/2021-11-11_04-18_0ca9e5f1dacd_rename_to_schemas_allowed_for_file_.py
@@ -39,7 +39,6 @@
 class Database(Base):
-
     __tablename__ = "dbs"
     id = Column(Integer, primary_key=True)
     extra = Column(Text)
diff --git a/superset/migrations/versions/2022-04-01_14-38_a9422eeaae74_new_dataset_models_take_2.py b/superset/migrations/versions/2022-04-01_14-38_a9422eeaae74_new_dataset_models_take_2.py
index 2dcd1650f0efa..286a0731fc7e6 100644
--- a/superset/migrations/versions/2022-04-01_14-38_a9422eeaae74_new_dataset_models_take_2.py
+++ b/superset/migrations/versions/2022-04-01_14-38_a9422eeaae74_new_dataset_models_take_2.py
@@ -103,7 +103,6 @@ def insert_from_select(
 
 class Database(Base):
-
     __tablename__ = "dbs"
     __table_args__ = (UniqueConstraint("database_name"),)
@@ -118,7 +117,6 @@ class Database(Base):
 
 class TableColumn(AuxiliaryColumnsMixin, Base):
-
     __tablename__ = "table_columns"
     __table_args__ = (UniqueConstraint("table_id", "column_name"),)
@@ -138,7 +136,6 @@ class TableColumn(AuxiliaryColumnsMixin, Base):
 
 class SqlMetric(AuxiliaryColumnsMixin, Base):
-
     __tablename__ = "sql_metrics"
     __table_args__ = (UniqueConstraint("table_id", "metric_name"),)
@@ -164,7 +161,6 @@ class SqlMetric(AuxiliaryColumnsMixin, Base):
 
 class SqlaTable(AuxiliaryColumnsMixin, Base):
-
     __tablename__ = "tables"
     __table_args__ = (UniqueConstraint("database_id", "schema", "table_name"),)
@@ -213,7 +209,6 @@ class SqlaTable(AuxiliaryColumnsMixin, Base):
 
 class NewColumn(AuxiliaryColumnsMixin, Base):
-
     __tablename__ = "sl_columns"
 
     id = sa.Column(sa.Integer, primary_key=True)
@@ -243,7 +238,6 @@ class NewColumn(AuxiliaryColumnsMixin, Base):
 
 class NewTable(AuxiliaryColumnsMixin, Base):
-
     __tablename__ = "sl_tables"
 
     id = sa.Column(sa.Integer, primary_key=True)
@@ -264,7 +258,6 @@ class NewTable(AuxiliaryColumnsMixin, Base):
 
 class NewDataset(Base, AuxiliaryColumnsMixin):
-
     __tablename__ = "sl_datasets"
 
     id = sa.Column(sa.Integer, primary_key=True)
@@ -636,7 +629,6 @@ def postprocess_columns(session: Session) -> None:
         return
 
     def get_joined_tables(offset, limit):
-
         # Import aliased from sqlalchemy
         from sqlalchemy.orm import aliased
@@ -788,7 +780,7 @@ def print_update_count():
             updates["external_url"] = external_url
 
         # update extra json
-        for (key, val) in (
+        for key, val in (
             {
                 "verbose_name": verbose_name,
                 "python_date_format": python_date_format,
diff --git a/superset/models/dashboard.py b/superset/models/dashboard.py
index 289fbd093e201..0395ffa1d57f0 100644
--- a/superset/models/dashboard.py
+++ b/superset/models/dashboard.py
@@ -323,8 +323,8 @@ def datasets_trimmed_for_slices(self) -> List[Dict[str, Any]]:
 
         return result
 
-    @property  # type: ignore
-    def params(self) -> str:  # type: ignore
+    @property
+    def params(self) -> str:
         return self.json_metadata
 
     @params.setter
diff --git a/superset/models/sql_lab.py b/superset/models/sql_lab.py
index f91a22a34a3c8..f12c8d6c45521 100644
--- a/superset/models/sql_lab.py
+++ b/superset/models/sql_lab.py
@@ -430,7 +430,6 @@ def last_run_delta_humanized(self) -> str:
 
 class TabState(Model, AuditMixinNullable, ExtraJSONMixin):
-
     __tablename__ = "tab_state"
 
     # basic info
@@ -493,7 +492,6 @@ def to_dict(self) -> Dict[str, Any]:
 
 class TableSchema(Model, AuditMixinNullable, ExtraJSONMixin):
-
     __tablename__ = "table_schema"
 
     id = Column(Integer, primary_key=True, autoincrement=True)
diff --git a/superset/queries/saved_queries/commands/export.py b/superset/queries/saved_queries/commands/export.py
index e209ae8ad2fd8..8c5357159e604 100644
--- a/superset/queries/saved_queries/commands/export.py
+++ b/superset/queries/saved_queries/commands/export.py
@@ -33,7 +33,6 @@
 class ExportSavedQueriesCommand(ExportModelsCommand):
-
     dao = SavedQueryDAO
     not_found = SavedQueryNotFoundError
diff --git a/superset/reports/commands/base.py b/superset/reports/commands/base.py
index 81ad17d42e630..4fee6a8824568 100644
--- a/superset/reports/commands/base.py
+++ b/superset/reports/commands/base.py
@@ -36,7 +36,6 @@
 class BaseReportScheduleCommand(BaseCommand):
-
     _properties: Dict[str, Any]
 
     def run(self) -> Any:
diff --git a/superset/security/manager.py b/superset/security/manager.py
index f068d2baec70d..aa2aab9a19ad1 100644
--- a/superset/security/manager.py
+++ b/superset/security/manager.py
@@ -2205,7 +2205,6 @@ def is_guest_user(user: Optional[Any] = None) -> bool:
         return hasattr(user, "is_guest_user") and user.is_guest_user
 
     def get_current_guest_user_if_guest(self) -> Optional[GuestUser]:
-
         if self.is_guest_user():
             return g.user
         return None
diff --git a/superset/sql_parse.py b/superset/sql_parse.py
index ef2c38ccfa182..f84d73bef032a 100644
--- a/superset/sql_parse.py
+++ b/superset/sql_parse.py
@@ -509,7 +509,6 @@ def has_table_query(token_list: TokenList) -> bool:
     """
     state = InsertRLSState.SCANNING
     for token in token_list.tokens:
-
         # Recurse into child token list
         if isinstance(token, TokenList) and has_table_query(token):
             return True
@@ -607,7 +606,6 @@ def insert_rls(
     rls: Optional[TokenList] = None
     state = InsertRLSState.SCANNING
     for token in token_list.tokens:
-
         # Recurse into child token list
         if isinstance(token, TokenList):
             i = token_list.tokens.index(token)
diff --git a/superset/sqllab/exceptions.py b/superset/sqllab/exceptions.py
index c0096d5db6b47..70e4fa9752b95 100644
--- a/superset/sqllab/exceptions.py
+++ b/superset/sqllab/exceptions.py
@@ -48,13 +48,13 @@ def __init__(  # pylint: disable=too-many-arguments
         if exception is not None:
             if (
                 hasattr(exception, "error_type")
-                and exception.error_type is not None  # type: ignore
+                and exception.error_type is not None
             ):
-                error_type = exception.error_type  # type: ignore
+                error_type = exception.error_type
             elif hasattr(exception, "error") and isinstance(
-                exception.error, SupersetError  # type: ignore
+                exception.error, SupersetError
             ):
-                error_type = exception.error.error_type  # type: ignore
+                error_type = exception.error.error_type
             else:
                 error_type = SupersetErrorType.GENERIC_BACKEND_ERROR
@@ -79,9 +79,9 @@ def _get_reason(
             return ": {}".format(reason_message)
         if exception is not None:
             if hasattr(exception, "get_message"):
-                return ": {}".format(exception.get_message())  # type: ignore
+                return ": {}".format(exception.get_message())
             if hasattr(exception, "message"):
-                return ": {}".format(exception.message)  # type: ignore
+                return ": {}".format(exception.message)
             return ": {}".format(str(exception))
         return ""
diff --git a/superset/sqllab/query_render.py b/superset/sqllab/query_render.py
index 2854a7e390774..be7a50ed3a19b 100644
--- a/superset/sqllab/query_render.py
+++ b/superset/sqllab/query_render.py
@@ -48,8 +48,7 @@ class SqlQueryRenderImpl(SqlQueryRender):
     def __init__(
         self, sql_template_factory: Callable[..., BaseTemplateProcessor]
     ) -> None:
-
-        self._sql_template_processor_factory = sql_template_factory  # type: ignore
+        self._sql_template_processor_factory = sql_template_factory
 
     def render(self, execution_context: SqlJsonExecutionContext) -> str:
         query_model = execution_context.query
diff --git a/superset/sqllab/sql_json_executer.py b/superset/sqllab/sql_json_executer.py
index 3d55047b41042..e4e6b60654b87 100644
--- a/superset/sqllab/sql_json_executer.py
+++ b/superset/sqllab/sql_json_executer.py
@@ -64,7 +64,7 @@ class SqlJsonExecutorBase(SqlJsonExecutor, ABC):
     def __init__(self, query_dao: QueryDAO, get_sql_results_task: GetSqlResultsTask):
         self._query_dao = query_dao
-        self._get_sql_results_task = get_sql_results_task  # type: ignore
+        self._get_sql_results_task = get_sql_results_task
 
 
 class SynchronousSqlJsonExecutor(SqlJsonExecutorBase):
@@ -163,7 +163,6 @@ def execute(
         rendered_query: str,
         log_params: Optional[Dict[str, Any]],
     ) -> SqlJsonExecutionStatus:
-
         query_id = execution_context.query.id
         logger.info("Query %i: Running query on a Celery worker", query_id)
         try:
diff --git a/superset/tags/models.py b/superset/tags/models.py
index b30a214bd6acf..797308c30675a 100644
--- a/superset/tags/models.py
+++ b/superset/tags/models.py
@@ -126,7 +126,6 @@ def get_object_type(class_name: str) -> ObjectTypes:
 
 class ObjectUpdater:
-
     object_type: Optional[str] = None
 
     @classmethod
@@ -218,7 +217,6 @@ def after_delete(
 
 class ChartUpdater(ObjectUpdater):
-
     object_type = "chart"
 
     @classmethod
@@ -227,7 +225,6 @@ def get_owners_ids(cls, target: Slice) -> List[int]:
 
 class DashboardUpdater(ObjectUpdater):
-
     object_type = "dashboard"
 
     @classmethod
@@ -236,7 +233,6 @@ def get_owners_ids(cls, target: Dashboard) -> List[int]:
 
 class QueryUpdater(ObjectUpdater):
-
     object_type = "query"
 
     @classmethod
@@ -245,7 +241,6 @@ def get_owners_ids(cls, target: Query) -> List[int]:
 
 class DatasetUpdater(ObjectUpdater):
-
     object_type = "dataset"
 
     @classmethod
diff --git a/superset/tasks/async_queries.py b/superset/tasks/async_queries.py
index 1157c5fd37e1f..ffd92c262747e 100644
--- a/superset/tasks/async_queries.py
+++ b/superset/tasks/async_queries.py
@@ -90,7 +90,11 @@ def load_chart_data_into_cache(
         raise ex
     except Exception as ex:
         # TODO: QueryContext should support SIP-40 style errors
-        error = ex.message if hasattr(ex, "message") else str(ex)  # type: ignore # pylint: disable=no-member
+        error = (
+            ex.message  # pylint: disable=no-member
+            if hasattr(ex, "message")
+            else str(ex)
+        )
         errors = [{"message": error}]
         async_query_manager.update_job(
             job_metadata, async_query_manager.STATUS_ERROR, errors=errors
@@ -157,7 +161,11 @@ def load_explore_json_into_cache(  # pylint: disable=too-many-locals
         if isinstance(ex, SupersetVizException):
             errors = ex.errors  # pylint: disable=no-member
         else:
-            error = ex.message if hasattr(ex, "message") else str(ex)  # type: ignore # pylint: disable=no-member
+            error = (
+                ex.message  # pylint: disable=no-member
+                if hasattr(ex, "message")
+                else str(ex)
+            )
             errors = [error]
 
         async_query_manager.update_job(
diff --git a/superset/utils/celery.py b/superset/utils/celery.py
index 89da812ec0b02..474fc98d9416e 100644
--- a/superset/utils/celery.py
+++ b/superset/utils/celery.py
@@ -27,6 +27,7 @@
 logger = logging.getLogger(__name__)
 
+
 # Null pool is used for the celery workers due process forking side effects.
 # For more info see: https://github.com/apache/superset/issues/10530
 @contextmanager
diff --git a/superset/utils/core.py b/superset/utils/core.py
index 9185cbe2fc233..d229942834e81 100644
--- a/superset/utils/core.py
+++ b/superset/utils/core.py
@@ -655,10 +655,10 @@ def error_msg_from_exception(ex: Exception) -> str:
     """
     msg = ""
     if hasattr(ex, "message"):
-        if isinstance(ex.message, dict):  # type: ignore
+        if isinstance(ex.message, dict):
             msg = ex.message.get("message")  # type: ignore
-        elif ex.message:  # type: ignore
-            msg = ex.message  # type: ignore
+        elif ex.message:
+            msg = ex.message
     return msg or str(ex)
@@ -1778,14 +1778,13 @@ def indexed(
 
 def is_test() -> bool:
-    return strtobool(os.environ.get("SUPERSET_TESTENV", "false"))
+    return strtobool(os.environ.get("SUPERSET_TESTENV", "false"))  # type: ignore
 
 
 def get_time_filter_status(
     datasource: "BaseDatasource",
     applied_time_extras: Dict[str, str],
 ) -> Tuple[List[Dict[str, str]], List[Dict[str, str]]]:
-
     temporal_columns: Set[Any]
     if datasource.type == "query":
         temporal_columns = {
diff --git a/superset/utils/dashboard_filter_scopes_converter.py b/superset/utils/dashboard_filter_scopes_converter.py
index 3aeb32360a775..2e48c114152a3 100644
--- a/superset/utils/dashboard_filter_scopes_converter.py
+++ b/superset/utils/dashboard_filter_scopes_converter.py
@@ -77,7 +77,7 @@ def copy_filter_scopes(
     old_filter_scopes: Dict[int, Dict[str, Dict[str, Any]]],
 ) -> Dict[str, Dict[Any, Any]]:
     new_filter_scopes: Dict[str, Dict[Any, Any]] = {}
-    for (filter_id, scopes) in old_filter_scopes.items():
+    for filter_id, scopes in old_filter_scopes.items():
         new_filter_key = old_to_new_slc_id_dict.get(int(filter_id))
         if new_filter_key:
             new_filter_scopes[str(new_filter_key)] = scopes
diff --git a/superset/utils/decorators.py b/superset/utils/decorators.py
index 073139574cd68..7852c4ff74725 100644
--- a/superset/utils/decorators.py
+++ b/superset/utils/decorators.py
@@ -47,7 +47,7 @@ def wrapped(*args: Any, **kwargs: Any) -> Any:
         except Exception as ex:
             if (
                 hasattr(ex, "status")
-                and ex.status < 500  # type: ignore # pylint: disable=no-member
+                and ex.status < 500  # pylint: disable=no-member
             ):
                 current_app.config["STATS_LOGGER"].gauge(
                     f"{metric_prefix_}.warning", 1
diff --git a/superset/utils/machine_auth.py b/superset/utils/machine_auth.py
index d58f739f7713c..02c04abe6ae97 100644
--- a/superset/utils/machine_auth.py
+++ b/superset/utils/machine_auth.py
@@ -52,7 +52,7 @@ def authenticate_webdriver(
     :return: The WebDriver passed in (fluent)
     """
     # Short-circuit this method if we have an override configured
-    if self._auth_webdriver_func_override:
+    if self._auth_webdriver_func_override:  # type: ignore
         return self._auth_webdriver_func_override(driver, user)
 
     # Setting cookies requires doing a request first
diff --git a/superset/views/base_api.py b/superset/views/base_api.py
index e5cc9b2c0c253..0b7448b958e60 100644
--- a/superset/views/base_api.py
+++ b/superset/views/base_api.py
@@ -114,10 +114,7 @@ def wraps(self: BaseSupersetApiMixin, *args: Any, **kwargs: Any) -> Response:
         try:
             duration, response = time_function(f, self, *args, **kwargs)
         except Exception as ex:
-            if (
-                hasattr(ex, "status")
-                and ex.status < 500  # type: ignore # pylint: disable=no-member
-            ):
+            if hasattr(ex, "status") and ex.status < 500:  # pylint: disable=no-member
                 self.incr_stats("warning", func_name)
             else:
                 self.incr_stats("error", func_name)
diff --git a/superset/views/chart/mixin.py b/superset/views/chart/mixin.py
index 186e86904abef..00950fc8fc3f5 100644
--- a/superset/views/chart/mixin.py
+++ b/superset/views/chart/mixin.py
@@ -22,7 +22,6 @@
 class SliceMixin:  # pylint: disable=too-few-public-methods
-
     list_title = _("Charts")
     show_title = _("Show Chart")
     add_title = _("Add Chart")
diff --git a/superset/views/core.py b/superset/views/core.py
index 5b003656762e2..7c5609ca2c312 100755
--- a/superset/views/core.py
+++ b/superset/views/core.py
@@ -2397,7 +2397,6 @@ def _create_response_from_execution_context(  # pylint: disable=invalid-name, no
         self,
         command_result: CommandResult,
     ) -> FlaskResponse:
-
         status_code = 200
         if command_result["status"] == SqlJsonExecutionStatus.QUERY_IS_RUNNING:
             status_code = 202
diff --git a/superset/views/dashboard/mixin.py b/superset/views/dashboard/mixin.py
index d8838b26c9d30..9fcee061a6297 100644
--- a/superset/views/dashboard/mixin.py
+++ b/superset/views/dashboard/mixin.py
@@ -21,7 +21,6 @@
 class DashboardMixin:  # pylint: disable=too-few-public-methods
-
     list_title = _("Dashboards")
     show_title = _("Show Dashboard")
     add_title = _("Add Dashboard")
diff --git a/superset/viz.py b/superset/viz.py
index 87f8bbee36950..8c027a132d4d3 100644
--- a/superset/viz.py
+++ b/superset/viz.py
@@ -3030,7 +3030,6 @@ def get_data(self, df: pd.DataFrame) -> VizData:
 
 class RoseViz(NVD3TimeSeriesViz):
-
     viz_type = "rose"
     verbose_name = _("Time Series - Nightingale Rose Chart")
     sort_series = False
diff --git a/tests/common/logger_utils.py b/tests/common/logger_utils.py
index 61e05bc45cff5..98471342b7f1a 100644
--- a/tests/common/logger_utils.py
+++ b/tests/common/logger_utils.py
@@ -61,7 +61,6 @@ def log(
     suffix_exit_msg: str = _DEFAULT_EXIT_MSG_SUFFIX,
     return_value_msg_part=_DEFAULT_RETURN_VALUE_MSG_PART,
 ) -> Decorated:
-
     decorator: Decorated = _make_decorator(
         prefix_enter_msg,
         suffix_enter_msg,
diff --git a/tests/integration_tests/charts/data/api_tests.py b/tests/integration_tests/charts/data/api_tests.py
index 83fb7281fbc74..2818793af0179 100644
--- a/tests/integration_tests/charts/data/api_tests.py
+++ b/tests/integration_tests/charts/data/api_tests.py
@@ -210,7 +210,6 @@ def test_as_samples_with_row_limit_bigger_then_sql_max_row_rowcount_as_sql_max_r
         {**app.config, "SAMPLES_ROW_LIMIT": 5, "SQL_MAX_ROW": 15},
     )
     def test_with_row_limit_as_samples__rowcount_as_row_limit(self):
-
         expected_row_count = 10
         self.query_context_payload["result_type"] = ChartDataResultType.SAMPLES
         self.query_context_payload["queries"][0]["row_limit"] = expected_row_count
@@ -234,7 +233,6 @@ def test_with_incorrect_result_format__400(self):
 
     @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
     def test_with_invalid_payload__400(self):
-
         invalid_query_context = {"form_data": "NOT VALID JSON"}
 
         rv = self.client.post(
@@ -585,7 +583,6 @@ def test_with_not_permitted_actor__403(self):
     def test_when_where_parameter_is_template_and_query_result_type__query_is_templated(
         self,
     ):
-
         self.query_context_payload["result_type"] = ChartDataResultType.QUERY
         self.query_context_payload["queries"][0]["filters"] = [
             {"col": "gender", "op": "==", "val": "boy"}
diff --git a/tests/integration_tests/cli_tests.py b/tests/integration_tests/cli_tests.py
index aaa682bee0f3e..f9195a6c26684 100644
--- a/tests/integration_tests/cli_tests.py
+++ b/tests/integration_tests/cli_tests.py
@@ -502,7 +502,6 @@ def test_failing_import_datasets_versioned_export(
 @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
 @mock.patch("superset.tasks.thumbnails.cache_dashboard_thumbnail")
 def test_compute_thumbnails(thumbnail_mock, app_context, fs):
-
     thumbnail_mock.return_value = None
     runner = app.test_cli_runner()
     dashboard = db.session.query(Dashboard).filter_by(slug="births").first()
diff --git a/tests/integration_tests/csv_upload_tests.py b/tests/integration_tests/csv_upload_tests.py
index 3e0200d18a26b..f8781a364968b 100644
--- a/tests/integration_tests/csv_upload_tests.py
+++ b/tests/integration_tests/csv_upload_tests.py
@@ -200,7 +200,7 @@ def mock_upload_to_s3(filename: str, upload_prefix: str, table: Table) -> str:
     # only needed for the hive tests
     import docker
 
-    client = docker.from_env()
+    client = docker.from_env()  # type: ignore
     container = client.containers.get("namenode")
     # docker mounted volume that contains csv uploads
     src = os.path.join("/tmp/superset_uploads", os.path.basename(filename))
diff --git a/tests/integration_tests/datasets/api_tests.py b/tests/integration_tests/datasets/api_tests.py
index 8071902c455da..b9d1eb84b6a55 100644
--- a/tests/integration_tests/datasets/api_tests.py
+++ b/tests/integration_tests/datasets/api_tests.py
@@ -61,7 +61,6 @@
 class TestDatasetApi(SupersetTestCase):
-
     fixture_tables_names = ("ab_permission", "ab_permission_view", "ab_view_menu")
     fixture_virtual_table_names = ("sql_virtual_dataset_1", "sql_virtual_dataset_2")
diff --git a/tests/integration_tests/db_engine_specs/bigquery_tests.py b/tests/integration_tests/db_engine_specs/bigquery_tests.py
index 574a2b75e32ca..b8807b7896695 100644
--- a/tests/integration_tests/db_engine_specs/bigquery_tests.py
+++ b/tests/integration_tests/db_engine_specs/bigquery_tests.py
@@ -93,6 +93,7 @@ def test_fetch_data(self):
         """
         DB Eng Specs (bigquery): Test fetch data
         """
+
        # Mock a google.cloud.bigquery.table.Row
         class Row(object):
             def __init__(self, value):
diff --git a/tests/integration_tests/fixtures/energy_dashboard.py b/tests/integration_tests/fixtures/energy_dashboard.py
index 202f494aa2d15..effe59a75544a 100644
--- a/tests/integration_tests/fixtures/energy_dashboard.py
+++ b/tests/integration_tests/fixtures/energy_dashboard.py
@@ -89,7 +89,6 @@ def _create_energy_table() -> List[Slice]:
     slices = []
     for slice_data in _get_energy_slices():
-
         slice = _create_and_commit_energy_slice(
             table,
             slice_data["slice_title"],
diff --git a/tests/integration_tests/fixtures/users.py b/tests/integration_tests/fixtures/users.py
index b812de4448979..e8e5d7823bf1b 100644
--- a/tests/integration_tests/fixtures/users.py
+++ b/tests/integration_tests/fixtures/users.py
@@ -23,7 +23,6 @@
 @pytest.fixture()
 def create_gamma_sqllab_no_data():
-
     with app.app_context():
         gamma_role = db.session.query(Role).filter(Role.name == "Gamma").one_or_none()
         sqllab_role = (
diff --git a/tests/integration_tests/reports/alert_tests.py b/tests/integration_tests/reports/alert_tests.py
index 6c5c41a81ff23..32cc2dcefb572 100644
--- a/tests/integration_tests/reports/alert_tests.py
+++ b/tests/integration_tests/reports/alert_tests.py
@@ -64,7 +64,6 @@ def test_execute_query_as_report_executor(
     app_context: None,
     get_user,
 ) -> None:
-
     from superset.reports.commands.alert import AlertCommand
     from superset.reports.models import ReportSchedule
@@ -104,7 +103,6 @@ def test_execute_query_as_report_executor(
 def test_execute_query_succeeded_no_retry(
     mocker: MockFixture, app_context: None
 ) -> None:
-
     from superset.reports.commands.alert import AlertCommand
 
     execute_query_mock = mocker.patch(
diff --git a/tests/integration_tests/reports/api_tests.py b/tests/integration_tests/reports/api_tests.py
index 22b9be9990b74..5a981166aa374 100644
--- a/tests/integration_tests/reports/api_tests.py
+++ b/tests/integration_tests/reports/api_tests.py
@@ -88,7 +88,6 @@ def gamma_user_with_alerts_role(self):
     @pytest.fixture()
     def create_working_admin_report_schedule(self):
         with self.create_app().app_context():
-
             admin_user = self.get_user("admin")
             chart = db.session.query(Slice).first()
             example_db = get_example_database()
@@ -114,7 +113,6 @@ def create_working_admin_report_schedule(self):
     @pytest.fixture()
     def create_working_gamma_report_schedule(self, gamma_user_with_alerts_role):
         with self.create_app().app_context():
-
             chart = db.session.query(Slice).first()
             example_db = get_example_database()
@@ -139,7 +137,6 @@ def create_working_gamma_report_schedule(self, gamma_user_with_alerts_role):
     @pytest.fixture()
     def create_working_shared_report_schedule(self, gamma_user_with_alerts_role):
         with self.create_app().app_context():
-
             admin_user = self.get_user("admin")
             alpha_user = self.get_user("alpha")
             chart = db.session.query(Slice).first()
@@ -213,7 +210,6 @@ def create_report_schedules(self):
     @pytest.fixture()
     def create_alpha_users(self):
         with self.create_app().app_context():
-
             users = [
                 self.create_user(
                     "alpha1", "password", "Alpha", email="alpha1@superset.org"
diff --git a/tests/integration_tests/reports/commands_tests.py b/tests/integration_tests/reports/commands_tests.py
index 8d6a76c14f67e..cad6a75a5d25e 100644
--- a/tests/integration_tests/reports/commands_tests.py
+++ b/tests/integration_tests/reports/commands_tests.py
@@ -466,7 +466,6 @@ def create_alert_email_chart(request):
     chart = db.session.query(Slice).first()
     example_database = get_example_database()
     with create_test_table_context(example_database):
-
         report_schedule = create_report_notification(
             email_target="target@email.com",
             chart=chart,
@@ -549,7 +548,6 @@ def create_no_alert_email_chart(request):
     chart = db.session.query(Slice).first()
     example_database = get_example_database()
     with create_test_table_context(example_database):
-
         report_schedule = create_report_notification(
             email_target="target@email.com",
             chart=chart,
@@ -584,7 +582,6 @@ def create_mul_alert_email_chart(request):
     chart = db.session.query(Slice).first()
     example_database = get_example_database()
     with create_test_table_context(example_database):
-
         report_schedule = create_report_notification(
             email_target="target@email.com",
             chart=chart,
@@ -619,7 +616,6 @@ def create_invalid_sql_alert_email_chart(request):
     chart = db.session.query(Slice).first()
     example_database = get_example_database()
     with create_test_table_context(example_database):
-
         report_schedule = create_report_notification(
             email_target="target@email.com",
             chart=chart,
@@ -1072,7 +1068,6 @@ def test_email_dashboard_report_schedule(
     with freeze_time("2020-01-01T00:00:00Z"):
         with patch.object(current_app.config["STATS_LOGGER"], "gauge") as statsd_mock:
-
             AsyncExecuteReportScheduleCommand(
                 TEST_ID, create_report_email_dashboard.id, datetime.utcnow()
             ).run()
@@ -1143,7 +1138,6 @@ def test_slack_chart_report_schedule(
     with freeze_time("2020-01-01T00:00:00Z"):
         with patch.object(current_app.config["STATS_LOGGER"], "gauge") as statsd_mock:
-
             AsyncExecuteReportScheduleCommand(
                 TEST_ID, create_report_slack_chart.id, datetime.utcnow()
             ).run()
@@ -1192,7 +1186,6 @@ def test_slack_chart_report_schedule_with_errors(
         web_client_mock.side_effect = er
 
         with pytest.raises(ReportScheduleClientErrorsException):
-
             AsyncExecuteReportScheduleCommand(
                 TEST_ID, create_report_slack_chart.id, datetime.utcnow()
             ).run()
@@ -1349,7 +1342,6 @@ def test_report_schedule_working_timeout(create_report_slack_chart_working):
         seconds=create_report_slack_chart_working.working_timeout + 1
     )
     with freeze_time(current_time):
-
         with pytest.raises(ReportScheduleWorkingTimeoutError):
             AsyncExecuteReportScheduleCommand(
                 TEST_ID, create_report_slack_chart_working.id, datetime.utcnow()
@@ -2020,7 +2012,6 @@ def test__send_with_multiple_errors(notification_mock, logger_mock):
 @patch("superset.reports.commands.execute.logger")
 @patch("superset.reports.commands.execute.create_notification")
 def test__send_with_server_errors(notification_mock, logger_mock):
-
     notification_content = "I am some content"
     recipients = ["test@foo.com"]
     notification_mock.return_value.send.side_effect = NotificationError()
diff --git a/tests/integration_tests/reports/scheduler_tests.py b/tests/integration_tests/reports/scheduler_tests.py
index 3dd6e72941e2e..15e945264d3fa 100644
--- a/tests/integration_tests/reports/scheduler_tests.py
+++ b/tests/integration_tests/reports/scheduler_tests.py
@@ -43,7 +43,6 @@ def test_scheduler_celery_timeout_ny(execute_mock, owners):
     Reports scheduler: Test scheduler setting celery soft and hard timeout
     """
     with app.app_context():
-
         report_schedule = insert_report_schedule(
             type=ReportScheduleType.ALERT,
             name="report",
@@ -91,7 +90,6 @@ def test_scheduler_celery_timeout_utc(execute_mock, owners):
     Reports scheduler: Test scheduler setting celery soft and hard timeout
     """
     with app.app_context():
-
         report_schedule = insert_report_schedule(
             type=ReportScheduleType.ALERT,
             name="report",
diff --git a/tests/integration_tests/security/row_level_security_tests.py b/tests/integration_tests/security/row_level_security_tests.py
index ebd95cae39bd7..bcf21d149cf31 100644
--- a/tests/integration_tests/security/row_level_security_tests.py
+++ b/tests/integration_tests/security/row_level_security_tests.py
@@ -152,7 +152,6 @@ def tearDown(self):
     @pytest.fixture()
     def create_dataset(self):
         with self.create_app().app_context():
-
             dataset = SqlaTable(database_id=1, schema=None, table_name="table1")
             db.session.add(dataset)
             db.session.flush()
diff --git a/tests/integration_tests/thumbnails_tests.py b/tests/integration_tests/thumbnails_tests.py
index efa0d73cb49f0..62641886e095e 100644
--- a/tests/integration_tests/thumbnails_tests.py
+++ b/tests/integration_tests/thumbnails_tests.py
@@ -199,7 +199,6 @@ def test_screenshot_selenium_animation_wait(
 
 class TestThumbnails(SupersetTestCase):
-
     mock_image = b"bytes mock image"
     digest_return_value = "foo_bar"
     digest_hash = "5c7d96a3dd7a87850a2ef34087565a6e"
diff --git a/tests/unit_tests/pandas_postprocessing/test_flatten.py b/tests/unit_tests/pandas_postprocessing/test_flatten.py
index fea84f7b9f5b0..99dbb7da038f9 100644
--- a/tests/unit_tests/pandas_postprocessing/test_flatten.py
+++ b/tests/unit_tests/pandas_postprocessing/test_flatten.py
@@ -86,7 +86,13 @@ def test_flat_should_drop_index_level():
     df = pd.DataFrame(index=index, columns=columns, data=1)
 
     # drop level by index
-    assert pp.flatten(df.copy(), drop_levels=(0, 1,)).equals(
+    assert pp.flatten(
+        df.copy(),
+        drop_levels=(
+            0,
+            1,
+        ),
+    ).equals(
         pd.DataFrame(
             {
                 "__timestamp": index,