diff --git a/superset/commands/dataset/create.py b/superset/commands/dataset/create.py
index aa5d62b2f2196..ae6a0af4ed324 100644
--- a/superset/commands/dataset/create.py
+++ b/superset/commands/dataset/create.py
@@ -54,13 +54,12 @@ def run(self) -> Model:
     def validate(self) -> None:
         exceptions: list[ValidationError] = []
         database_id = self._properties["database"]
-        schema = self._properties.get("schema")
         catalog = self._properties.get("catalog")
+        schema = self._properties.get("schema")
+        table_name = self._properties["table_name"]
         sql = self._properties.get("sql")
         owner_ids: Optional[list[int]] = self._properties.get("owners")
 
-        table = Table(self._properties["table_name"], schema, catalog)
-
         # Validate/Populate database
         database = DatasetDAO.get_database_by_id(database_id)
         if not database:
@@ -68,8 +67,14 @@ def validate(self) -> None:
         self._properties["database"] = database
 
         # Validate uniqueness
-        if database and not DatasetDAO.validate_uniqueness(database, table):
-            exceptions.append(DatasetExistsValidationError(table))
+        if database:
+            if not catalog:
+                catalog = self._properties["catalog"] = database.get_default_catalog()
+
+            table = Table(table_name, schema, catalog)
+
+            if not DatasetDAO.validate_uniqueness(database, table):
+                exceptions.append(DatasetExistsValidationError(table))
 
         # Validate table exists on dataset if sql is not provided
         # This should be validated when the dataset is physical
diff --git a/superset/commands/dataset/update.py b/superset/commands/dataset/update.py
index e0841c85ba1e2..2772cc0ffa1f6 100644
--- a/superset/commands/dataset/update.py
+++ b/superset/commands/dataset/update.py
@@ -93,10 +93,16 @@ def validate(self) -> None:
 
         database_id = self._properties.get("database")
 
+        catalog = self._properties.get("catalog")
+        if not catalog:
+            catalog = self._properties["catalog"] = (
+                self._model.database.get_default_catalog()
+            )
+
         table = Table(
             self._properties.get("table_name"),  # type: ignore
             self._properties.get("schema"),
-            self._properties.get("catalog"),
+            catalog,
         )
 
         # Validate uniqueness
diff --git a/superset/databases/api.py b/superset/databases/api.py
index d490ac70dab55..695ea028b476d 100644
--- a/superset/databases/api.py
+++ b/superset/databases/api.py
@@ -1159,7 +1159,7 @@ def select_star(
         self.incr_stats("init", self.select_star.__name__)
         try:
             result = database.select_star(
-                Table(table_name, schema_name),
+                Table(table_name, schema_name, database.get_default_catalog()),
                 latest_partition=True,
            )
        except NoSuchTableError:
diff --git a/superset/jinja_context.py b/superset/jinja_context.py
index 8d59eade155b8..10428db34e85a 100644
--- a/superset/jinja_context.py
+++ b/superset/jinja_context.py
@@ -565,7 +565,7 @@ def process_template(self, sql: str, **kwargs: Any) -> str:
         """
         Makes processing a template a noop
         """
-        return sql
+        return str(sql)
 
 
 class PrestoTemplateProcessor(JinjaTemplateProcessor):
diff --git a/superset/sqllab/api.py b/superset/sqllab/api.py
index cdb331c19bc33..f7d66ed4e19fa 100644
--- a/superset/sqllab/api.py
+++ b/superset/sqllab/api.py
@@ -284,6 +284,7 @@ def export_csv(self, client_id: str) -> CsvResponse:
             "client_id": client_id,
             "row_count": row_count,
             "database": query.database.name,
+            "catalog": query.catalog,
             "schema": query.schema,
             "sql": query.sql,
             "exported_format": "csv",
diff --git a/superset/sqllab/sqllab_execution_context.py b/superset/sqllab/sqllab_execution_context.py
index 5ca180d101b55..ab0f91bbf30ca 100644
--- a/superset/sqllab/sqllab_execution_context.py
+++ b/superset/sqllab/sqllab_execution_context.py
@@ -125,6 +125,8 @@ def select_as_cta(self) -> bool:
     def set_database(self, database: Database) -> None:
         self._validate_db(database)
         self.database = database
+        if self.catalog is None:
+            self.catalog = database.get_default_catalog()
         if self.select_as_cta:
             schema_name = self._get_ctas_target_schema_name(database)
             self.create_table_as_select.target_schema_name = schema_name  # type: ignore
diff --git a/superset/views/sql_lab/views.py b/superset/views/sql_lab/views.py
index 3ec3667267471..3b24f7c0eca3b 100644
--- a/superset/views/sql_lab/views.py
+++ b/superset/views/sql_lab/views.py
@@ -239,6 +239,7 @@ def post(self) -> FlaskResponse:
         db.session.query(TableSchema).filter(
             TableSchema.tab_state_id == table["queryEditorId"],
             TableSchema.database_id == table["dbId"],
+            TableSchema.catalog == table["catalog"],
             TableSchema.schema == table["schema"],
             TableSchema.table == table["name"],
         ).delete(synchronize_session=False)
@@ -246,6 +247,7 @@ def post(self) -> FlaskResponse:
         table_schema = TableSchema(
             tab_state_id=table["queryEditorId"],
             database_id=table["dbId"],
+            catalog=table["catalog"],
             schema=table["schema"],
             table=table["name"],
             description=json.dumps(table),
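
The recurring pattern across these hunks is: when the caller omits a catalog, fall back to the database's default catalog before building a `Table` reference or persisting state. A minimal standalone sketch of that fallback follows; `FakeDatabase`, `resolve_table`, and the `Table` dataclass are illustrative stand-ins for this example only, not Superset's real classes.

```python
from dataclasses import dataclass
from typing import Optional


@dataclass(frozen=True)
class Table:
    """Illustrative stand-in for a (table, schema, catalog) reference."""

    table: str
    schema: Optional[str] = None
    catalog: Optional[str] = None


class FakeDatabase:
    """Illustrative stand-in for a database model with a default catalog."""

    def __init__(self, default_catalog: Optional[str]) -> None:
        self._default_catalog = default_catalog

    def get_default_catalog(self) -> Optional[str]:
        # The real lookup is engine-specific; here it is just a stored value.
        return self._default_catalog


def resolve_table(
    database: FakeDatabase,
    table_name: str,
    schema: Optional[str],
    catalog: Optional[str],
) -> Table:
    """Mirror the diff's pattern: fill in the default catalog when none is given."""
    if not catalog:
        catalog = database.get_default_catalog()
    return Table(table_name, schema, catalog)


if __name__ == "__main__":
    db = FakeDatabase(default_catalog="examples")
    # Missing catalog falls back to the database default.
    assert resolve_table(db, "orders", "public", None) == Table("orders", "public", "examples")
    # An explicit catalog is left untouched.
    assert resolve_table(db, "orders", "public", "other") == Table("orders", "public", "other")
```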