diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py index 3e95d63497cdc..8677be6d69461 100644 --- a/superset/connectors/sqla/models.py +++ b/superset/connectors/sqla/models.py @@ -14,7 +14,6 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -# pylint: disable=C,R,W import logging from collections import OrderedDict from datetime import datetime, timedelta @@ -102,7 +101,7 @@ def query(self, query_obj: QueryObjectDict) -> QueryResult: status = utils.QueryStatus.SUCCESS try: df = pd.read_sql_query(qry.statement, db.engine) - except Exception as ex: + except Exception as ex: # pylint: disable=broad-except df = pd.DataFrame() status = utils.QueryStatus.FAILED logger.exception(ex) @@ -228,7 +227,7 @@ def get_timestamp_expression( """ label = utils.DTTM_ALIAS - db = self.table.database + db_ = self.table.database pdf = self.python_date_format is_epoch = pdf in ("epoch_s", "epoch_ms") if not self.expression and not time_grain and not is_epoch: @@ -238,7 +237,7 @@ def get_timestamp_expression( col = literal_column(self.expression) else: col = column(self.column_name) - time_expr = db.db_engine_spec.get_timestamp_expr( + time_expr = db_.db_engine_spec.get_timestamp_expr( col, pdf, time_grain, self.type ) return self.table.make_sqla_column_compatible(time_expr, label) @@ -387,7 +386,9 @@ def lookup_obj(lookup_metric: SqlMetric) -> SqlMetric: ) -class SqlaTable(Model, BaseDatasource): +class SqlaTable( # pylint: disable=too-many-public-methods,too-many-instance-attributes + Model, BaseDatasource +): """An ORM object for SqlAlchemy table references""" @@ -461,7 +462,7 @@ def make_sqla_column_compatible( if db_engine_spec.allows_column_aliases: label = db_engine_spec.make_label_compatible(label_expected) sqla_col = sqla_col.label(label) - sqla_col._df_label_expected = label_expected + sqla_col._df_label_expected = label_expected # pylint: disable=protected-access return sqla_col def __repr__(self) -> str: @@ -560,8 +561,7 @@ def any_dttm_col(self) -> Optional[str]: @property def html(self) -> str: - t = ((c.column_name, c.type) for c in self.columns) - df = pd.DataFrame(t) + df = pd.DataFrame((c.column_name, c.type) for c in self.columns) df.columns = ["field", "type"] return df.to_html( index=False, @@ -598,18 +598,18 @@ def select_star(self) -> Optional[str]: @property def data(self) -> Dict[str, Any]: - d = super().data + data_ = super().data if self.type == "table": grains = self.database.grains() or [] if grains: grains = [(g.duration, g.name) for g in grains] - d["granularity_sqla"] = utils.choicify(self.dttm_cols) - d["time_grain_sqla"] = grains - d["main_dttm_col"] = self.main_dttm_col - d["fetch_values_predicate"] = self.fetch_values_predicate - d["template_params"] = self.template_params - d["is_sqllab_view"] = self.is_sqllab_view - return d + data_["granularity_sqla"] = utils.choicify(self.dttm_cols) + data_["time_grain_sqla"] = grains + data_["main_dttm_col"] = self.main_dttm_col + data_["fetch_values_predicate"] = self.fetch_values_predicate + data_["template_params"] = self.template_params + data_["is_sqllab_view"] = self.is_sqllab_view + return data_ def values_for_column(self, column_name: str, limit: int = 10000) -> List[Any]: """Runs query against sqla to retrieve some @@ -642,10 +642,10 @@ def mutate_query_from_config(self, sql: str) -> str: """Apply config's SQL_QUERY_MUTATOR Typically adds comments to the query with context""" - SQL_QUERY_MUTATOR = 
config["SQL_QUERY_MUTATOR"] - if SQL_QUERY_MUTATOR: + sql_query_mutator = config["SQL_QUERY_MUTATOR"] + if sql_query_mutator: username = utils.get_username() - sql = SQL_QUERY_MUTATOR(sql, username, security_manager, self.database) + sql = sql_query_mutator(sql, username, security_manager, self.database) return sql def get_template_processor(self, **kwargs: Any) -> BaseTemplateProcessor: @@ -717,9 +717,11 @@ def _get_sqla_row_level_filters( self, template_processor: BaseTemplateProcessor ) -> List[str]: """ - Return the appropriate row level security filters for this table and the current user. + Return the appropriate row level security filters for + this table and the current user. - :param BaseTemplateProcessor template_processor: The template processor to apply to the filters. + :param BaseTemplateProcessor template_processor: The template + processor to apply to the filters. :returns: A list of SQL clauses to be ANDed together. :rtype: List[str] """ @@ -728,7 +730,7 @@ def _get_sqla_row_level_filters( for f in security_manager.get_rls_filters(self) ] - def get_sqla_query( # sqla + def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-many-branches,too-many-statements self, metrics: List[Metric], granularity: str, @@ -736,7 +738,9 @@ def get_sqla_query( # sqla to_dttm: Optional[datetime], columns: Optional[List[str]] = None, groupby: Optional[List[str]] = None, - filter: Optional[List[Dict[str, Any]]] = None, + filter: Optional[ # pylint: disable=redefined-builtin + List[Dict[str, Any]] + ] = None, # pylint: disable=bad-whitespace is_timeseries: bool = True, timeseries_limit: int = 15, timeseries_limit_metric: Optional[Metric] = None, @@ -793,14 +797,14 @@ def get_sqla_query( # sqla ): raise Exception(_("Empty query?")) metrics_exprs: List[ColumnElement] = [] - for m in metrics: - if utils.is_adhoc_metric(m): - assert isinstance(m, dict) - metrics_exprs.append(self.adhoc_metric_to_sqla(m, cols)) - elif isinstance(m, str) and m in metrics_dict: - metrics_exprs.append(metrics_dict[m].get_sqla_col()) + for metric in metrics: + if utils.is_adhoc_metric(metric): + assert isinstance(metric, dict) + metrics_exprs.append(self.adhoc_metric_to_sqla(metric, cols)) + elif isinstance(metric, str) and metric in metrics_dict: + metrics_exprs.append(metrics_dict[metric].get_sqla_col()) else: - raise Exception(_("Metric '%(metric)s' does not exist", metric=m)) + raise Exception(_("Metric '%(metric)s' does not exist", metric=metric)) if metrics_exprs: main_metric_expr = metrics_exprs[0] else: @@ -817,21 +821,21 @@ def get_sqla_query( # sqla groupby = list(dict.fromkeys(columns_)) select_exprs = [] - for s in groupby: - if s in cols: - outer = cols[s].get_sqla_col() + for selected in groupby: + if selected in cols: + outer = cols[selected].get_sqla_col() else: - outer = literal_column(f"({s})") - outer = self.make_sqla_column_compatible(outer, s) + outer = literal_column(f"({selected})") + outer = self.make_sqla_column_compatible(outer, selected) groupby_exprs_sans_timestamp[outer.name] = outer select_exprs.append(outer) elif columns: - for s in columns: + for selected in columns: select_exprs.append( - cols[s].get_sqla_col() - if s in cols - else self.make_sqla_column_compatible(literal_column(s)) + cols[selected].get_sqla_col() + if selected in cols + else self.make_sqla_column_compatible(literal_column(selected)) ) metrics_exprs = [] @@ -865,7 +869,10 @@ def get_sqla_query( # sqla select_exprs += metrics_exprs - labels_expected = [c._df_label_expected for c in select_exprs] + 
labels_expected = [ + c._df_label_expected # pylint: disable=protected-access + for c in select_exprs + ] select_exprs = db_engine_spec.make_select_compatible( groupby_exprs_with_timestamp.values(), select_exprs @@ -902,7 +909,11 @@ def get_sqla_query( # sqla ): cond = col_obj.get_sqla_col().in_(eq) if isinstance(eq, str) and NULL_STRING in eq: - cond = or_(cond, col_obj.get_sqla_col() is None) + cond = or_( + cond, + col_obj.get_sqla_col() # pylint: disable=singleton-comparison + == None, + ) if op == utils.FilterOperator.NOT_IN.value: cond = ~cond where_clause_and.append(cond) @@ -924,9 +935,15 @@ def get_sqla_query( # sqla elif op == utils.FilterOperator.LIKE.value: where_clause_and.append(col_obj.get_sqla_col().like(eq)) elif op == utils.FilterOperator.IS_NULL.value: - where_clause_and.append(col_obj.get_sqla_col() == None) + where_clause_and.append( + col_obj.get_sqla_col() # pylint: disable=singleton-comparison + == None + ) elif op == utils.FilterOperator.IS_NOT_NULL.value: - where_clause_and.append(col_obj.get_sqla_col() != None) + where_clause_and.append( + col_obj.get_sqla_col() # pylint: disable=singleton-comparison + != None + ) else: raise Exception( _("Invalid filter operation type: %(op)s", op=op) @@ -953,7 +970,9 @@ def get_sqla_query( # sqla # To ensure correct handling of the ORDER BY labeling we need to reference the # metric instance if defined in the SELECT clause. - metrics_exprs_by_label = {m._label: m for m in metrics_exprs} + metrics_exprs_by_label = { + m._label: m for m in metrics_exprs # pylint: disable=protected-access + } for col, ascending in orderby: direction = asc if ascending else desc @@ -962,8 +981,10 @@ def get_sqla_query( # sqla elif col in cols: col = cols[col].get_sqla_col() - if isinstance(col, Label) and col._label in metrics_exprs_by_label: - col = metrics_exprs_by_label[col._label] + if isinstance(col, Label): + label = col._label # pylint: disable=protected-access + if label in metrics_exprs_by_label: + col = metrics_exprs_by_label[label] qry = qry.order_by(direction(col)) @@ -973,7 +994,7 @@ def get_sqla_query( # sqla qry = qry.offset(row_offset) if ( - is_timeseries + is_timeseries # pylint: disable=too-many-boolean-expressions and timeseries_limit and not time_groupby_inline and ((is_sip_38 and columns) or (not is_sip_38 and groupby)) @@ -1087,14 +1108,14 @@ def _get_timeseries_orderby( return ob - def _get_top_groups( + def _get_top_groups( # pylint: disable=no-self-use self, df: pd.DataFrame, dimensions: List[str], groupby_exprs: "OrderedDict[str, Any]", ) -> ColumnElement: groups = [] - for unused, row in df.iterrows(): + for _unused, row in df.iterrows(): group = [] for dimension in dimensions: group.append(groupby_exprs[dimension] == row[dimension]) @@ -1126,15 +1147,16 @@ def mutator(df: pd.DataFrame) -> None: f"For {sql}, df.columns: {df.columns}" f" differs from {labels_expected}" ) - else: - df.columns = labels_expected + df.columns = labels_expected try: df = self.database.get_df(sql, self.schema, mutator) - except Exception as ex: + except Exception as ex: # pylint: disable=broad-except df = pd.DataFrame() status = utils.QueryStatus.FAILED - logger.warning(f"Query {sql} on schema {self.schema} failed", exc_info=True) + logger.warning( + "Query %s on schema %s failed", sql, self.schema, exc_info=True + ) db_engine_spec = self.database.db_engine_spec errors = db_engine_spec.extract_errors(ex) @@ -1152,7 +1174,7 @@ def get_sqla_table_object(self) -> Table: def fetch_metadata(self, commit: bool = True) -> None: """Fetches the metadata 
for the table and merges it in""" try: - table = self.get_sqla_table_object() + table_ = self.get_sqla_table_object() except Exception as ex: logger.exception(ex) raise Exception( @@ -1169,24 +1191,22 @@ def fetch_metadata(self, commit: bool = True) -> None: dbcols = ( db.session.query(TableColumn) .filter(TableColumn.table == self) - .filter(or_(TableColumn.column_name == col.name for col in table.columns)) + .filter(or_(TableColumn.column_name == col.name for col in table_.columns)) ) dbcols = {dbcol.column_name: dbcol for dbcol in dbcols} - for col in table.columns: + for col in table_.columns: try: datatype = db_engine_spec.column_datatype_to_string( col.type, db_dialect ) - except Exception as ex: + except Exception as ex: # pylint: disable=broad-except datatype = "UNKNOWN" - logger.error("Unrecognized data type in {}.{}".format(table, col.name)) + logger.error("Unrecognized data type in %s.%s", table_, col.name) logger.exception(ex) dbcol = dbcols.get(col.name, None) if not dbcol: dbcol = TableColumn(column_name=col.name, type=datatype, table=self) - dbcol.sum = dbcol.is_numeric - dbcol.avg = dbcol.is_numeric dbcol.is_dttm = dbcol.is_temporal db_engine_spec.alter_new_orm_column(dbcol) else: @@ -1227,30 +1247,30 @@ def import_obj( superset instances. Audit metadata isn't copies over. """ - def lookup_sqlatable(table: "SqlaTable") -> "SqlaTable": + def lookup_sqlatable(table_: "SqlaTable") -> "SqlaTable": return ( db.session.query(SqlaTable) .join(Database) .filter( - SqlaTable.table_name == table.table_name, - SqlaTable.schema == table.schema, - Database.id == table.database_id, + SqlaTable.table_name == table_.table_name, + SqlaTable.schema == table_.schema, + Database.id == table_.database_id, ) .first() ) - def lookup_database(table: SqlaTable) -> Database: + def lookup_database(table_: SqlaTable) -> Database: try: return ( db.session.query(Database) - .filter_by(database_name=table.params_dict["database_name"]) + .filter_by(database_name=table_.params_dict["database_name"]) .one() ) except NoResultFound: raise DatabaseNotFound( _( "Database '%(name)s' is not found", - name=table.params_dict["database_name"], + name=table_.params_dict["database_name"], ) ) diff --git a/superset/connectors/sqla/views.py b/superset/connectors/sqla/views.py index 39a898467a40d..5f3466fb5f7e1 100644 --- a/superset/connectors/sqla/views.py +++ b/superset/connectors/sqla/views.py @@ -14,7 +14,6 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -# pylint: disable=C,R,W """Views used by the SqlAlchemy connector""" import logging import re @@ -30,8 +29,9 @@ from wtforms.ext.sqlalchemy.fields import QuerySelectField from wtforms.validators import Regexp -from superset import app, db, security_manager +from superset import app, db from superset.connectors.base.views import DatasourceModelView +from superset.connectors.sqla import models from superset.constants import RouteMethod from superset.typing import FlaskResponse from superset.utils import core as utils @@ -45,12 +45,12 @@ YamlExportMixin, ) -from . 
import models - logger = logging.getLogger(__name__) -class TableColumnInlineView(CompactCRUDMixin, SupersetModelView): +class TableColumnInlineView( # pylint: disable=too-many-ancestors + CompactCRUDMixin, SupersetModelView +): datamodel = SQLAInterface(models.TableColumn) # TODO TODO, review need for this on related_views include_route_methods = RouteMethod.RELATED_VIEW_SET | RouteMethod.API_SET @@ -168,7 +168,9 @@ class TableColumnInlineView(CompactCRUDMixin, SupersetModelView): edit_form_extra_fields = add_form_extra_fields -class SqlMetricInlineView(CompactCRUDMixin, SupersetModelView): +class SqlMetricInlineView( # pylint: disable=too-many-ancestors + CompactCRUDMixin, SupersetModelView +): datamodel = SQLAInterface(models.SqlMetric) include_route_methods = RouteMethod.RELATED_VIEW_SET | RouteMethod.API_SET @@ -228,7 +230,9 @@ class SqlMetricInlineView(CompactCRUDMixin, SupersetModelView): edit_form_extra_fields = add_form_extra_fields -class RowLevelSecurityFiltersModelView(SupersetModelView, DeleteMixin): +class RowLevelSecurityFiltersModelView( # pylint: disable=too-many-ancestors + SupersetModelView, DeleteMixin +): datamodel = SQLAInterface(models.RowLevelSecurityFilter) list_title = _("Row level security filter") @@ -248,7 +252,8 @@ class RowLevelSecurityFiltersModelView(SupersetModelView, DeleteMixin): "roles": _("These are the roles this filter will be applied to."), "clause": _( "This is the condition that will be added to the WHERE clause. " - "For example, to only return rows for a particular client, you might put in: client_id = 9" + "For example, to only return rows for a particular client, " + "you might put in: client_id = 9" ), } label_columns = { @@ -260,7 +265,9 @@ class RowLevelSecurityFiltersModelView(SupersetModelView, DeleteMixin): } -class TableModelView(DatasourceModelView, DeleteMixin, YamlExportMixin): +class TableModelView( # pylint: disable=too-many-ancestors + DatasourceModelView, DeleteMixin, YamlExportMixin +): datamodel = SQLAInterface(models.SqlaTable) include_route_methods = RouteMethod.CRUD_SET @@ -377,12 +384,14 @@ class TableModelView(DatasourceModelView, DeleteMixin, YamlExportMixin): ) } - def pre_add(self, table: "TableModelView") -> None: - validate_sqlatable(table) + def pre_add(self, item: "TableModelView") -> None: + validate_sqlatable(item) - def post_add(self, table: "TableModelView", flash_message: bool = True) -> None: - table.fetch_metadata() - create_table_permissions(table) + def post_add( # pylint: disable=arguments-differ + self, item: "TableModelView", flash_message: bool = True + ) -> None: + item.fetch_metadata() + create_table_permissions(item) if flash_message: flash( _( @@ -394,8 +403,8 @@ def post_add(self, table: "TableModelView", flash_message: bool = True) -> None: "info", ) - def post_update(self, table: "TableModelView") -> None: - self.post_add(table, flash_message=False) + def post_update(self, item: "TableModelView") -> None: + self.post_add(item, flash_message=False) def _delete(self, pk: int) -> None: DeleteMixin._delete(self, pk) @@ -412,19 +421,19 @@ def edit(self, pk: int) -> FlaskResponse: @action( "refresh", __("Refresh Metadata"), __("Refresh column metadata"), "fa-refresh" ) - def refresh( + def refresh( # pylint: disable=no-self-use self, tables: Union["TableModelView", List["TableModelView"]] ) -> FlaskResponse: if not isinstance(tables, list): tables = [tables] successes = [] failures = [] - for t in tables: + for table_ in tables: try: - t.fetch_metadata() - successes.append(t) - except Exception: - 
failures.append(t) + table_.fetch_metadata() + successes.append(table_) + except Exception: # pylint: disable=broad-except + failures.append(table_) if len(successes) > 0: success_msg = _( diff --git a/superset/result_set.py b/superset/result_set.py index dd6f0ffa9a18a..38ca74d4d2ceb 100644 --- a/superset/result_set.py +++ b/superset/result_set.py @@ -14,7 +14,6 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -# pylint: disable=C,R,W """ Superset wrapper around pyarrow.Table. """ import datetime @@ -48,14 +47,14 @@ def dedup(l: List[str], suffix: str = "__", case_sensitive: bool = True) -> List """ new_l: List[str] = [] seen: Dict[str, int] = {} - for s in l: - s_fixed_case = s if case_sensitive else s.lower() + for item in l: + s_fixed_case = item if case_sensitive else item.lower() if s_fixed_case in seen: seen[s_fixed_case] += 1 - s += suffix + str(seen[s_fixed_case]) + item += suffix + str(seen[s_fixed_case]) else: seen[s_fixed_case] = 0 - new_l.append(s) + new_l.append(item) return new_l @@ -69,7 +68,7 @@ def stringify_values(array: np.ndarray) -> np.ndarray: class SupersetResultSet: - def __init__( + def __init__( # pylint: disable=too-many-locals,too-many-branches self, data: DbapiResult, cursor_description: DbapiDescription, @@ -108,13 +107,14 @@ def __init__( pa.lib.ArrowInvalid, pa.lib.ArrowTypeError, pa.lib.ArrowNotImplementedError, - TypeError, # this is super hackey, https://issues.apache.org/jira/browse/ARROW-7855 + TypeError, # this is super hackey, + # https://issues.apache.org/jira/browse/ARROW-7855 ): # attempt serialization of values as strings stringified_arr = stringify_values(array[column]) pa_data.append(pa.array(stringified_arr.tolist())) - if pa_data: + if pa_data: # pylint: disable=too-many-nested-blocks for i, column in enumerate(column_names): if pa.types.is_nested(pa_data[i].type): # TODO: revisit nested column serialization once nested types @@ -124,7 +124,8 @@ def __init__( pa_data[i] = pa.array(stringified_arr.tolist()) elif pa.types.is_temporal(pa_data[i].type): - # workaround for bug converting `psycopg2.tz.FixedOffsetTimezone` tzinfo values. + # workaround for bug converting + # `psycopg2.tz.FixedOffsetTimezone` tzinfo values. # related: https://issues.apache.org/jira/browse/ARROW-5248 sample = self.first_nonempty(array[column]) if sample and isinstance(sample, datetime.datetime): @@ -138,7 +139,7 @@ def __init__( pa_data[i] = pa.Array.from_pandas( series, type=pa.timestamp("ns", tz=tz) ) - except Exception as ex: + except Exception as ex: # pylint: disable=broad-except logger.exception(ex) self.table = pa.Table.from_arrays(pa_data, names=column_names) @@ -150,7 +151,7 @@ def __init__( for i, col in enumerate(column_names) if deduped_cursor_desc } - except Exception as ex: + except Exception as ex: # pylint: disable=broad-except logger.exception(ex) @staticmethod diff --git a/superset/security/manager.py b/superset/security/manager.py index 3d03ad058bc33..d0276371f5a1a 100644 --- a/superset/security/manager.py +++ b/superset/security/manager.py @@ -14,7 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-# pylint: disable=C,R,W +# pylint: disable=too-few-public-methods """A set of constants and methods to manage permissions and security""" import logging from typing import Any, Callable, cast, List, Optional, Set, Tuple, TYPE_CHECKING, Union @@ -102,7 +102,9 @@ def __init__(self, **kwargs: Any) -> None: RoleModelView.related_views = [] -class SupersetSecurityManager(SecurityManager): +class SupersetSecurityManager( # pylint: disable=too-many-public-methods + SecurityManager +): userstatschartview = None READ_ONLY_MODEL_VIEWS = {"DatabaseAsync", "DatabaseView", "DruidClusterModelView"} @@ -166,7 +168,7 @@ class SupersetSecurityManager(SecurityManager): ACCESSIBLE_PERMS = {"can_userinfo"} - def get_schema_perm( + def get_schema_perm( # pylint: disable=no-self-use self, database: Union["Database", str], schema: Optional[str] = None ) -> Optional[str]: """ @@ -182,7 +184,9 @@ def get_schema_perm( return None - def unpack_schema_perm(self, schema_permission: str) -> Tuple[str, str]: + def unpack_schema_perm( # pylint: disable=no-self-use + self, schema_permission: str + ) -> Tuple[str, str]: # [database_name].[schema_name] schema_name = schema_permission.split(".")[1][1:-1] database_name = schema_permission.split(".")[0][1:-1] @@ -284,7 +288,8 @@ def can_access_datasource(self, datasource: "BaseDatasource") -> bool: return True - def get_datasource_access_error_msg(self, datasource: "BaseDatasource") -> str: + @staticmethod + def get_datasource_access_error_msg(datasource: "BaseDatasource") -> str: """ Return the error message for the denied Superset datasource. @@ -295,7 +300,10 @@ def get_datasource_access_error_msg(self, datasource: "BaseDatasource") -> str: return f"""This endpoint requires the datasource {datasource.name}, database or `all_datasource_access` permission""" - def get_datasource_access_link(self, datasource: "BaseDatasource") -> Optional[str]: + @staticmethod + def get_datasource_access_link( # pylint: disable=unused-argument + datasource: "BaseDatasource", + ) -> Optional[str]: """ Return the link for the denied Superset datasource. @@ -307,7 +315,7 @@ def get_datasource_access_link(self, datasource: "BaseDatasource") -> Optional[s return conf.get("PERMISSION_INSTRUCTIONS_LINK") - def get_datasource_access_error_object( + def get_datasource_access_error_object( # pylint: disable=invalid-name self, datasource: "BaseDatasource" ) -> SupersetError: """ @@ -326,7 +334,9 @@ def get_datasource_access_error_object( }, ) - def get_table_access_error_msg(self, tables: Set["Table"]) -> str: + def get_table_access_error_msg( # pylint: disable=no-self-use + self, tables: Set["Table"] + ) -> str: """ Return the error message for the denied SQL tables. @@ -355,7 +365,9 @@ def get_table_access_error_object(self, tables: Set["Table"]) -> SupersetError: }, ) - def get_table_access_link(self, tables: Set["Table"]) -> Optional[str]: + def get_table_access_link( # pylint: disable=unused-argument,no-self-use + self, tables: Set["Table"] + ) -> Optional[str]: """ Return the access link for the denied SQL tables. 
@@ -412,7 +424,7 @@ def user_view_menu_names(self, permission_name: str) -> Set[str]: .filter(self.user_model.id == g.user.id) .filter(self.permission_model.name == permission_name) ).all() - return set([s.name for s in view_menu_names]) + return {s.name for s in view_menu_names} # Properly treat anonymous user public_role = self.get_public_role() @@ -423,7 +435,7 @@ def user_view_menu_names(self, permission_name: str) -> Set[str]: self.permission_model.name == permission_name ) ).all() - return set([s.name for s in view_menu_names]) + return {s.name for s in view_menu_names} return set() def get_schemas_accessible_by_user( @@ -466,7 +478,7 @@ def get_schemas_accessible_by_user( return [s for s in schemas if s in accessible_schemas] - def get_datasources_accessible_by_user( + def get_datasources_accessible_by_user( # pylint: disable=invalid-name self, database: "Database", datasource_names: List[DatasourceName], @@ -499,9 +511,9 @@ def get_datasources_accessible_by_user( if schema: names = {d.table_name for d in user_datasources if d.schema == schema} return [d for d in datasource_names if d in names] - else: - full_names = {d.full_name for d in user_datasources} - return [d for d in datasource_names if f"[{database}].[{d}]" in full_names] + + full_names = {d.full_name for d in user_datasources} + return [d for d in datasource_names if f"[{database}].[{d}]" in full_names] def merge_perm(self, permission_name: str, view_menu_name: str) -> None: """ @@ -579,12 +591,17 @@ def clean_perms(self) -> None: logger.info("Cleaning faulty perms") sesh = self.get_session pvms = sesh.query(PermissionView).filter( - or_(PermissionView.permission == None, PermissionView.view_menu == None,) + or_( + PermissionView.permission # pylint: disable=singleton-comparison + == None, + PermissionView.view_menu # pylint: disable=singleton-comparison + == None, + ) ) deleted_count = pvms.delete() sesh.commit() if deleted_count: - logger.info("Deleted {} faulty permissions".format(deleted_count)) + logger.info("Deleted %i faulty permissions", deleted_count) def sync_role_definitions(self) -> None: """ @@ -623,7 +640,7 @@ def set_role( :param pvm_check: The FAB permission/view check """ - logger.info("Syncing {} perms".format(role_name)) + logger.info("Syncing %s perms", role_name) sesh = self.get_session pvms = sesh.query(PermissionView).all() pvms = [p for p in pvms if p.permission and p.view_menu] @@ -750,7 +767,9 @@ def _is_sql_lab_pvm(self, pvm: PermissionModelView) -> bool: ) ) - def _is_granter_pvm(self, pvm: PermissionModelView) -> bool: + def _is_granter_pvm( # pylint: disable=no-self-use + self, pvm: PermissionModelView + ) -> bool: """ Return True if the user can grant the FAB permission/view, False otherwise. @@ -761,7 +780,7 @@ def _is_granter_pvm(self, pvm: PermissionModelView) -> bool: return pvm.permission.name in {"can_override_role_permissions", "can_approve"} - def set_perm( + def set_perm( # pylint: disable=no-self-use,unused-argument self, mapper: Mapper, connection: Connection, target: "BaseDatasource" ) -> None: """ @@ -834,7 +853,7 @@ def set_perm( ) ) - def raise_for_access( + def raise_for_access( # pylint: disable=too-many-arguments,too-many-branches self, database: Optional["Database"] = None, datasource: Optional["BaseDatasource"] = None, @@ -886,15 +905,15 @@ def raise_for_access( ) # Access to any datasource is suffice. 
- for datasource in datasources: - if self.can_access("datasource_access", datasource.perm): + for datasource_ in datasources: + if self.can_access("datasource_access", datasource_.perm): break else: denied.add(table_) if denied: raise SupersetSecurityException( - self.get_table_access_error_object(denied), + self.get_table_access_error_object(denied) ) if datasource or query_context or viz: @@ -910,10 +929,12 @@ def raise_for_access( or self.can_access("datasource_access", datasource.perm or "") ): raise SupersetSecurityException( - self.get_datasource_access_error_object(datasource), + self.get_datasource_access_error_object(datasource) ) - def get_rls_filters(self, table: "BaseDatasource") -> List[SqlaQuery]: + def get_rls_filters( # pylint: disable=no-self-use + self, table: "BaseDatasource" + ) -> List[SqlaQuery]: """ Retrieves the appropriate row level security filters for the current user and the passed table. diff --git a/superset/tasks/thumbnails.py b/superset/tasks/thumbnails.py index 11977004531c1..564a3124e9088 100644 --- a/superset/tasks/thumbnails.py +++ b/superset/tasks/thumbnails.py @@ -14,7 +14,6 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -# pylint: disable=C,R,W """Utility functions used across Superset""" @@ -34,20 +33,22 @@ def cache_chart_thumbnail(chart_id: int, force: bool = False) -> None: with app.app_context(): # type: ignore if not thumbnail_cache: logger.warning("No cache set, refusing to compute") - return None - logging.info(f"Caching chart {chart_id}") + return + logging.info("Caching chart %i", chart_id) screenshot = ChartScreenshot(model_id=chart_id) user = security_manager.find_user(current_app.config["THUMBNAIL_SELENIUM_USER"]) screenshot.compute_and_cache(user=user, cache=thumbnail_cache, force=force) @celery_app.task(name="cache_dashboard_thumbnail", soft_time_limit=300) -def cache_dashboard_thumbnail(dashboard_id: int, force: bool = False) -> None: +def cache_dashboard_thumbnail( # pylint: disable=inconsistent-return-statements + dashboard_id: int, force: bool = False +) -> None: with app.app_context(): # type: ignore if not thumbnail_cache: logging.warning("No cache set, refusing to compute") - return None - logger.info(f"Caching dashboard {dashboard_id}") + return + logger.info("Caching dashboard %i", dashboard_id) screenshot = DashboardScreenshot(model_id=dashboard_id) user = security_manager.find_user(current_app.config["THUMBNAIL_SELENIUM_USER"]) screenshot.compute_and_cache(user=user, cache=thumbnail_cache, force=force)
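Reviewer note (not part of the patch): the recurring `# pylint: disable=singleton-comparison` hunks above look wrong at first glance, since PEP 8 prefers `is None`. For SQLAlchemy column expressions the comparison operator is the point: `==` is overloaded to build an `IS NULL` clause, while `is None` is evaluated by Python itself and is always False for a column object. That is exactly the bug the `NULL_STRING` hunk fixes by replacing `or_(cond, col_obj.get_sqla_col() is None)` with an explicit `== None`. A minimal sketch, assuming nothing beyond SQLAlchemy core (the `client_id` column is made up for illustration):

    from sqlalchemy import column, or_

    col = column("client_id")

    # SQLAlchemy intercepts `==` and builds a SQL expression tree;
    # compiling it yields an IS NULL clause.
    null_check = col == None  # pylint: disable=singleton-comparison
    print(null_check)  # client_id IS NULL

    # Python evaluates `is None` before SQLAlchemy ever sees it: a
    # ColumnClause is never the None singleton, so the pre-patch
    # `or_(cond, col is None)` silently dropped the NULL branch.
    print(col is None)  # False

    # Combined the way get_sqla_query assembles its WHERE clause:
    print(or_(col.in_(["a", "b"]), null_check))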
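The logging hunks follow the same pattern of satisfying pylint while also fixing behavior: passing the template and arguments to the logger separately, instead of pre-rendering an f-string, defers interpolation until a handler actually emits the record, so suppressed levels cost almost nothing and log aggregators see one message template rather than one per value. A short sketch of the difference, with made-up values (in the patch itself, `exc_info=True` is also passed so the active traceback is attached):

    import logging

    logging.basicConfig(level=logging.WARNING)
    logger = logging.getLogger("superset.demo")

    sql, schema = "SELECT 1", "main"

    # Eager: the f-string is rendered even though DEBUG is filtered out.
    logger.debug(f"Query {sql} on schema {schema} failed")

    # Lazy: %-style arguments are formatted only if the record passes
    # the level check and reaches a handler.
    logger.warning("Query %s on schema %s failed", sql, schema)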