Skip to content

Commit

Permalink
GEN-1192 - Move Test Case to its Own Resource (#17862)
Browse files Browse the repository at this point in the history
* feat: indexed test case results

* feat: added indexation logic for test case results

* style: ran java linting

* fix: IDE warnings

* chore: added test case results migration

* style: ran java linting

* fix: postgres migration column json ref

* empty commit to trigger queued

* chore: extracted test case results to its own resource

* chore: fix failing tests

* chore: move testCaseResult state from testSuite and testCase to dynamic field fetched from test case results search index

* chore: clean up test case repository

* style: ran java linting

* chore: removed testCaseResultSummary and testCaseResult state from db

* fix: test failures

* chore: fix index mapping type for result value

* chore: fix test failure
  • Loading branch information
TeddyCr authored Sep 18, 2024
1 parent 55a0272 commit 33c50ef
Show file tree
Hide file tree
Showing 29 changed files with 2,577 additions and 1,361 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,18 @@ ADD COLUMN id VARCHAR(36) GENERATED ALWAYS AS (json ->> '$.id') STORED NOT NULL,
ADD CONSTRAINT UNIQUE (id);

-- Create index on id column
CREATE INDEX data_quality_data_time_series_id_index ON data_quality_data_time_series (id);
CREATE INDEX data_quality_data_time_series_id_index ON data_quality_data_time_series (id);

-- Remove VIRTUAL status column from test_case table and remove
-- testCaseResult state from testCase; fetch from search repo.
ALTER TABLE test_case DROP COLUMN status;
-- Copy the last known status into the entity JSON before the embedded
-- result object is removed below. Guard against rows that never recorded
-- a testCaseResult: JSON_SET with a NULL value would otherwise write a
-- JSON null into $.testCaseStatus for those rows.
UPDATE test_case
SET json = JSON_SET(json, '$.testCaseStatus', JSON_EXTRACT(json, '$.testCaseResult.testCaseStatus'))
WHERE JSON_EXTRACT(json, '$.testCaseResult.testCaseStatus') IS NOT NULL;
-- Re-create status as a STORED generated column sourced from the copied
-- top-level testCaseStatus field.
ALTER TABLE test_case ADD COLUMN status VARCHAR(56) GENERATED ALWAYS AS (JSON_UNQUOTE(JSON_EXTRACT(json, '$.testCaseStatus'))) STORED;


-- Remove test case result states (now served from the search index)
UPDATE test_suite
SET json = JSON_REMOVE(json, '$.testCaseResultSummary');

UPDATE test_case
SET json = JSON_REMOVE(json, '$.testCaseResult');
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ SET json = jsonb_set(
FROM test_case tc
WHERE dqdts.entityfqnHash = tc.fqnHash;


-- Add id column to data_quality_data_time_series table
-- after we have added the id values to the records
ALTER TABLE data_quality_data_time_series
Expand All @@ -15,3 +16,17 @@ ADD CONSTRAINT id_unique UNIQUE (id);

-- Create index on id column
CREATE INDEX IF NOT EXISTS data_quality_data_time_series_id_index ON data_quality_data_time_series (id);

-- Remove VIRTUAL status column from test_case table and remove
-- testCaseResult state from testCase; fetch from search repo.
ALTER TABLE test_case DROP COLUMN status;
-- Copy the last known status into the entity JSON before the embedded
-- result object is removed below. jsonb_set() returns NULL when its
-- new_value argument is NULL, which would wipe the ENTIRE json column
-- for rows without a testCaseResult — restrict the update to rows that
-- actually have a recorded status.
UPDATE test_case
SET json = jsonb_set(json, '{testCaseStatus}', json->'testCaseResult'->'testCaseStatus')
WHERE json->'testCaseResult'->'testCaseStatus' IS NOT NULL;
-- Re-create status as a STORED generated column sourced from the copied
-- top-level testCaseStatus field.
ALTER TABLE test_case ADD COLUMN status VARCHAR(56) GENERATED ALWAYS AS (json ->> 'testCaseStatus') STORED NULL;


-- Remove test case result states (now served from the search index)
UPDATE test_suite
SET json = json - 'testCaseResultSummary';

UPDATE test_case
SET json = json - 'testCaseResult';
19 changes: 19 additions & 0 deletions ingestion/tests/integration/data_quality/test_data_diff.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import sys
from datetime import datetime

import pytest
from pydantic import BaseModel
Expand Down Expand Up @@ -67,6 +68,7 @@ def __init__(self, *args, **kwargs):
),
"POSTGRES_SERVICE.dvdrental.public.customer",
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Success,
failedRows=0,
passedRows=599,
Expand All @@ -85,6 +87,7 @@ def __init__(self, *args, **kwargs):
),
"POSTGRES_SERVICE.dvdrental.public.changed_customer",
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Failed,
failedRows=321,
passedRows=278,
Expand All @@ -99,6 +102,7 @@ def __init__(self, *args, **kwargs):
),
"POSTGRES_SERVICE.dvdrental.public.changed_customer",
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Failed,
failedRows=321,
passedRows=278,
Expand All @@ -114,6 +118,7 @@ def __init__(self, *args, **kwargs):
),
"POSTGRES_SERVICE.dvdrental.public.changed_customer",
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Success,
failedRows=321,
),
Expand All @@ -128,6 +133,7 @@ def __init__(self, *args, **kwargs):
),
"POSTGRES_SERVICE.dvdrental.public.changed_customer",
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Failed,
failedRows=321,
),
Expand All @@ -146,6 +152,7 @@ def __init__(self, *args, **kwargs):
),
"POSTGRES_SERVICE.dvdrental.public.changed_customer",
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Success,
),
),
Expand All @@ -158,6 +165,7 @@ def __init__(self, *args, **kwargs):
),
"POSTGRES_SERVICE.dvdrental.public.customer_without_first_name",
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Failed,
testResultValue=[
TestResultValue(name="removedColumns", value="1"),
Expand All @@ -179,6 +187,7 @@ def __init__(self, *args, **kwargs):
),
"POSTGRES_SERVICE.dvdrental.public.customer_without_first_name",
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Success,
),
),
Expand Down Expand Up @@ -208,6 +217,7 @@ def __init__(self, *args, **kwargs):
),
"MYSQL_SERVICE.default.test.customer",
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Success,
),
),
Expand All @@ -220,6 +230,7 @@ def __init__(self, *args, **kwargs):
),
"MYSQL_SERVICE.default.test.changed_customer",
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Failed,
),
),
Expand Down Expand Up @@ -290,6 +301,9 @@ def test_happy_paths(
fields=["*"],
)
assert "ERROR: Unexpected error" not in test_case_entity.testCaseResult.result
parameters.expected.timestamp = (
test_case_entity.testCaseResult.timestamp
) # timestamp is not deterministic
assert_equal_pydantic_objects(parameters.expected, test_case_entity.testCaseResult)


Expand All @@ -313,6 +327,7 @@ def test_happy_paths(
],
),
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Aborted,
result="Unsupported dialect in param table2.serviceUrl: mongodb",
),
Expand All @@ -331,6 +346,7 @@ def test_happy_paths(
],
),
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Failed,
result="Tables have 1 different columns:"
"\n Changed columns:"
Expand Down Expand Up @@ -405,6 +421,9 @@ def test_error_paths(
test_case_entity: TestCase = metadata.get_or_create_test_case(
f"{table1.fullyQualifiedName.root}.{parameters.name}"
)
expected.timestamp = (
test_case_entity.testCaseResult.timestamp
) # timestamp is not deterministic
assert_equal_pydantic_objects(expected, test_case_entity.testCaseResult)


Expand Down
11 changes: 11 additions & 0 deletions ingestion/tests/integration/mysql/test_data_quality.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,7 @@ class TestColumnParameter:
],
),
expected_result=TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Failed,
),
),
Expand All @@ -86,6 +87,7 @@ class TestColumnParameter:
],
),
expected_result=TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Failed,
),
),
Expand All @@ -101,6 +103,7 @@ class TestColumnParameter:
],
),
expected_result=TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Failed,
),
),
Expand All @@ -116,6 +119,7 @@ class TestColumnParameter:
],
),
expected_result=TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Success,
),
),
Expand All @@ -131,6 +135,7 @@ class TestColumnParameter:
],
),
expected_result=TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Success,
),
),
Expand All @@ -144,6 +149,7 @@ class TestColumnParameter:
parameterValues=[],
),
expected_result=TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Success,
),
),
Expand All @@ -162,6 +168,7 @@ class TestColumnParameter:
],
),
expected_result=TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Success,
),
),
Expand All @@ -180,6 +187,7 @@ class TestColumnParameter:
],
),
expected_result=TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Failed,
),
),
Expand Down Expand Up @@ -216,6 +224,9 @@ def test_column_test_cases(
nullable=False,
)
cleanup_fqns(TestCase, test_case.fullyQualifiedName.root)
parameters.expected_result.timestamp = (
test_case.testCaseResult.timestamp
) # timestamp is not deterministic
assert_equal_pydantic_objects(
parameters.expected_result,
test_case.testCaseResult,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -124,6 +124,8 @@ public final class Entity {

public static final String FIELD_DISABLED = "disabled";

public static final String FIELD_TEST_SUITES = "testSuites";

//
// Service entities
//
Expand Down Expand Up @@ -215,7 +217,7 @@ public final class Entity {
// Time series entities
public static final String ENTITY_REPORT_DATA = "entityReportData";
public static final String TEST_CASE_RESOLUTION_STATUS = "testCaseResolutionStatus";
public static final String TEST_CASE_RESULTS = "testCaseResult";
public static final String TEST_CASE_RESULT = "testCaseResult";
public static final String WEB_ANALYTIC_ENTITY_VIEW_REPORT_DATA =
"webAnalyticEntityViewReportData";
public static final String WEB_ANALYTIC_USER_ACTIVITY_REPORT_DATA =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -175,6 +175,9 @@ public interface CollectionDAO {
@CreateSqlObject
TestCaseResolutionStatusTimeSeriesDAO testCaseResolutionStatusTimeSeriesDao();

@CreateSqlObject
TestCaseResultTimeSeriesDAO testCaseResultTimeSeriesDao();

@CreateSqlObject
RoleDAO roleDAO();

Expand Down Expand Up @@ -4431,6 +4434,47 @@ default List<String> listWithOffset(
}
}

/**
 * Time-series DAO for test case results, stored in the
 * data_quality_data_time_series table alongside an optional incident id.
 */
interface TestCaseResultTimeSeriesDAO extends EntityTimeSeriesDAO {
@Override
default String getTimeSeriesTableName() {
return "data_quality_data_time_series";
}

// NOTE(review): both SQL strings hardcode the table name instead of using
// a <table> template placeholder, so the @Define("table") parameter below
// appears unused — confirm whether it can be dropped or the SQL should
// reference <table>.
@ConnectionAwareSqlUpdate(
value =
"INSERT INTO data_quality_data_time_series(entityFQNHash, extension, jsonSchema, json, incidentId) "
+ "VALUES (:testCaseFQNHash, :extension, :jsonSchema, :json, :incidentStateId)",
connectionType = MYSQL)
@ConnectionAwareSqlUpdate(
value =
"INSERT INTO data_quality_data_time_series(entityFQNHash, extension, jsonSchema, json, incidentId) "
// Postgres variant casts the bound string to jsonb for the json column.
+ "VALUES (:testCaseFQNHash, :extension, :jsonSchema, (:json :: jsonb), :incidentStateId)",
connectionType = POSTGRES)
void insert(
@Define("table") String table,
@BindFQN("testCaseFQNHash") String testCaseFQNHash,
@Bind("extension") String extension,
@Bind("jsonSchema") String jsonSchema,
@Bind("json") String json,
@Bind("incidentStateId") String incidentStateId);

/**
 * Convenience overload: fills in the table name and converts the optional
 * incident UUID to its string form (null-safe) before delegating to the
 * annotated insert.
 */
default void insert(
String testCaseFQN,
String extension,
String jsonSchema,
String json,
UUID incidentStateId) {

insert(
getTimeSeriesTableName(),
testCaseFQN,
extension,
jsonSchema,
json,
incidentStateId != null ? incidentStateId.toString() : null);
}
}

class EntitiesCountRowMapper implements RowMapper<EntitiesCount> {
@Override
public EntitiesCount map(ResultSet rs, StatementContext ctx) throws SQLException {
Expand Down
Loading

0 comments on commit 33c50ef

Please sign in to comment.