Skip to content

Commit

Permalink
test: birth names (#12226)
Browse files Browse the repository at this point in the history
* add birth names fixture

* fix birth names related tests

* fix test_import_v0_dataset_cli_export columns order

* fix celery tests drop table

* fix mysql datetime type

* fix mysql typo in charts/api_tests

* refactor

* add license

* fix use fixture for presto

* fix presto, hive query

* fix flaky metadata

* fix mysql bigint type

* fix run query

* fix hive datatype in metadata

* fix slice owner for cypress

* refactor num_boys num_girls

* fix is_dttm column

* debug logging

* fix query offset

* fix presto ds type in metadata

* fix presto ds type

* clean up debug logging
  • Loading branch information
kstrz authored and villebro committed Jan 25, 2021
1 parent 8b09414 commit 0acd2cc
Show file tree
Hide file tree
Showing 26 changed files with 677 additions and 189 deletions.
60 changes: 43 additions & 17 deletions superset/examples/birth_names.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,32 +108,49 @@ def load_birth_names(
print(f"Creating table [{tbl_name}] reference")
obj = TBL(table_name=tbl_name)
db.session.add(obj)
obj.main_dttm_col = "ds"
obj.database = database
obj.filter_select_enabled = True
obj.fetch_metadata()

if not any(col.column_name == "num_california" for col in obj.columns):
_set_table_metadata(obj, database)
_add_table_metrics(obj)

db.session.commit()

slices, _ = create_slices(obj, admin_owner=True)
create_dashboard(slices)


def _set_table_metadata(datasource: "BaseDatasource", database: "Database") -> None:
datasource.main_dttm_col = "ds" # type: ignore
datasource.database = database
datasource.filter_select_enabled = True
datasource.fetch_metadata()


def _add_table_metrics(datasource: "BaseDatasource") -> None:
if not any(col.column_name == "num_california" for col in datasource.columns):
col_state = str(column("state").compile(db.engine))
col_num = str(column("num").compile(db.engine))
obj.columns.append(
datasource.columns.append(
TableColumn(
column_name="num_california",
expression=f"CASE WHEN {col_state} = 'CA' THEN {col_num} ELSE 0 END",
)
)

if not any(col.metric_name == "sum__num" for col in obj.metrics):
if not any(col.metric_name == "sum__num" for col in datasource.metrics):
col = str(column("num").compile(db.engine))
obj.metrics.append(SqlMetric(metric_name="sum__num", expression=f"SUM({col})"))

db.session.commit()
datasource.metrics.append(
SqlMetric(metric_name="sum__num", expression=f"SUM({col})")
)

slices, _ = create_slices(obj)
create_dashboard(slices)
for col in datasource.columns:
if col.column_name == "ds":
col.is_dttm = True # type: ignore
break


def create_slices(tbl: BaseDatasource) -> Tuple[List[Slice], List[Slice]]:
def create_slices(
tbl: BaseDatasource, admin_owner: bool
) -> Tuple[List[Slice], List[Slice]]:
metrics = [
{
"expressionType": "SIMPLE",
Expand All @@ -160,9 +177,17 @@ def create_slices(tbl: BaseDatasource) -> Tuple[List[Slice], List[Slice]]:
"markup_type": "markdown",
}

slice_props = dict(
datasource_id=tbl.id, datasource_type="table", owners=[admin], created_by=admin
)
if admin_owner:
slice_props = dict(
datasource_id=tbl.id,
datasource_type="table",
owners=[admin],
created_by=admin,
)
else:
slice_props = dict(
datasource_id=tbl.id, datasource_type="table", owners=[], created_by=admin
)

print("Creating some slices")
slices = [
Expand Down Expand Up @@ -475,7 +500,7 @@ def create_slices(tbl: BaseDatasource) -> Tuple[List[Slice], List[Slice]]:
return slices, misc_slices


def create_dashboard(slices: List[Slice]) -> None:
def create_dashboard(slices: List[Slice]) -> Dashboard:
print("Creating a dashboard")

dash = db.session.query(Dashboard).filter_by(slug="births").first()
Expand Down Expand Up @@ -779,3 +804,4 @@ def create_dashboard(slices: List[Slice]) -> None:
dash.position_json = json.dumps(pos, indent=4)
dash.slug = "births"
db.session.commit()
return dash
8 changes: 7 additions & 1 deletion tests/access_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
import json
import unittest
from unittest import mock
from tests.fixtures.birth_names_dashboard import load_birth_names_dashboard_with_slices

import pytest

Expand Down Expand Up @@ -142,6 +143,7 @@ def test_override_role_permissions_is_admin_only(self):
)
self.assertNotEqual(405, response.status_code)

@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_override_role_permissions_1_table(self):
response = self.client.post(
"/superset/override_role_permissions/",
Expand All @@ -160,6 +162,7 @@ def test_override_role_permissions_1_table(self):
"datasource_access", updated_override_me.permissions[0].permission.name
)

@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_override_role_permissions_druid_and_table(self):
response = self.client.post(
"/superset/override_role_permissions/",
Expand Down Expand Up @@ -187,7 +190,9 @@ def test_override_role_permissions_druid_and_table(self):
)
self.assertEqual(3, len(perms))

@pytest.mark.usefixtures("load_energy_table_with_slice")
@pytest.mark.usefixtures(
"load_energy_table_with_slice", "load_birth_names_dashboard_with_slices"
)
def test_override_role_permissions_drops_absent_perms(self):
override_me = security_manager.find_role("override_me")
override_me.permissions.append(
Expand Down Expand Up @@ -247,6 +252,7 @@ def test_clean_requests_after_role_extend(self):
gamma_user = security_manager.find_user(username="gamma")
gamma_user.roles.remove(security_manager.find_role("test_role1"))

@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_clean_requests_after_alpha_grant(self):
session = db.session

Expand Down
5 changes: 5 additions & 0 deletions tests/cache_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,12 @@
"""Unit tests for Superset with caching"""
import json

import pytest

from superset import app, db
from superset.extensions import cache_manager
from superset.utils.core import QueryStatus
from tests.fixtures.birth_names_dashboard import load_birth_names_dashboard_with_slices

from .base_tests import SupersetTestCase

Expand All @@ -34,6 +37,7 @@ def tearDown(self):
cache_manager.cache.clear()
cache_manager.data_cache.clear()

@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_no_data_cache(self):
data_cache_config = app.config["DATA_CACHE_CONFIG"]
app.config["DATA_CACHE_CONFIG"] = {"CACHE_TYPE": "null"}
Expand All @@ -54,6 +58,7 @@ def test_no_data_cache(self):
self.assertFalse(resp["is_cached"])
self.assertFalse(resp_from_cache["is_cached"])

@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_slice_data_cache(self):
# Override cache config
data_cache_config = app.config["DATA_CACHE_CONFIG"]
Expand Down
23 changes: 23 additions & 0 deletions tests/celery_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import time
import unittest.mock as mock
from typing import Optional
from tests.fixtures.birth_names_dashboard import load_birth_names_dashboard_with_slices

import pytest

Expand Down Expand Up @@ -160,6 +161,7 @@ def test_run_sync_query_dont_exist(setup_sqllab, ctas_method):
}


@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@pytest.mark.parametrize("ctas_method", [CtasMethod.TABLE, CtasMethod.VIEW])
def test_run_sync_query_cta(setup_sqllab, ctas_method):
tmp_table_name = f"{TEST_SYNC}_{ctas_method.lower()}"
Expand All @@ -173,7 +175,10 @@ def test_run_sync_query_cta(setup_sqllab, ctas_method):
assert QueryStatus.SUCCESS == results["status"], results
assert len(results["data"]) > 0

delete_tmp_view_or_table(tmp_table_name, ctas_method)


@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_run_sync_query_cta_no_data(setup_sqllab):
sql_empty_result = "SELECT * FROM birth_names WHERE name='random'"
result = run_sql(sql_empty_result)
Expand All @@ -184,6 +189,7 @@ def test_run_sync_query_cta_no_data(setup_sqllab):
assert QueryStatus.SUCCESS == query.status


@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@pytest.mark.parametrize("ctas_method", [CtasMethod.TABLE, CtasMethod.VIEW])
@mock.patch(
"superset.views.core.get_cta_schema_name", lambda d, u, s, sql: CTAS_SCHEMA_NAME
Expand All @@ -208,7 +214,10 @@ def test_run_sync_query_cta_config(setup_sqllab, ctas_method):
results = run_sql(query.select_sql)
assert QueryStatus.SUCCESS == results["status"], result

delete_tmp_view_or_table(f"{CTAS_SCHEMA_NAME}.{tmp_table_name}", ctas_method)


@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@pytest.mark.parametrize("ctas_method", [CtasMethod.TABLE, CtasMethod.VIEW])
@mock.patch(
"superset.views.core.get_cta_schema_name", lambda d, u, s, sql: CTAS_SCHEMA_NAME
Expand All @@ -232,7 +241,10 @@ def test_run_async_query_cta_config(setup_sqllab, ctas_method):
== query.executed_sql
)

delete_tmp_view_or_table(f"{CTAS_SCHEMA_NAME}.{tmp_table_name}", ctas_method)


@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@pytest.mark.parametrize("ctas_method", [CtasMethod.TABLE, CtasMethod.VIEW])
def test_run_async_cta_query(setup_sqllab, ctas_method):
table_name = f"{TEST_ASYNC_CTA}_{ctas_method.lower()}"
Expand All @@ -252,7 +264,10 @@ def test_run_async_cta_query(setup_sqllab, ctas_method):
assert query.select_as_cta
assert query.select_as_cta_used

delete_tmp_view_or_table(table_name, ctas_method)


@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@pytest.mark.parametrize("ctas_method", [CtasMethod.TABLE, CtasMethod.VIEW])
def test_run_async_cta_query_with_lower_limit(setup_sqllab, ctas_method):
tmp_table = f"{TEST_ASYNC_LOWER_LIMIT}_{ctas_method.lower()}"
Expand All @@ -272,6 +287,8 @@ def test_run_async_cta_query_with_lower_limit(setup_sqllab, ctas_method):
assert query.select_as_cta
assert query.select_as_cta_used

delete_tmp_view_or_table(tmp_table, ctas_method)


SERIALIZATION_DATA = [("a", 4, 4.0, datetime.datetime(2019, 8, 18, 16, 39, 16, 660000))]
CURSOR_DESCR = (
Expand Down Expand Up @@ -306,6 +323,7 @@ def test_new_data_serialization():
assert isinstance(data[0], bytes)


@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_default_payload_serialization():
use_new_deserialization = False
db_engine_spec = BaseEngineSpec()
Expand Down Expand Up @@ -338,6 +356,7 @@ def test_default_payload_serialization():
assert isinstance(serialized, str)


@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_msgpack_payload_serialization():
use_new_deserialization = True
db_engine_spec = BaseEngineSpec()
Expand Down Expand Up @@ -406,3 +425,7 @@ def my_task():
my_task()
finally:
flask._app_ctx_stack.push(popped_app)


def delete_tmp_view_or_table(name: str, db_object_type: str) -> None:
    """Drop a temporary table or view created by a CTAS test, if it exists.

    :param name: object name to drop; callers may pass a schema-qualified
        name such as ``sqllab_test_db.tmp_table`` (see the CTAS config tests)
    :param db_object_type: the SQL object kind — callers pass a
        ``CtasMethod`` value, i.e. ``TABLE`` or ``VIEW``
    """
    # NOTE(review): both arguments are interpolated straight into the SQL
    # statement. That is acceptable here because tests pass hard-coded
    # names, but this helper must never receive untrusted input.
    db.get_engine().execute(f"DROP {db_object_type} IF EXISTS {name}")
Loading

0 comments on commit 0acd2cc

Please sign in to comment.