Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

GEN-1192 - Move Test Case to its Own Resource #17862

Merged
merged 39 commits into from
Sep 18, 2024
Merged
Show file tree
Hide file tree
Changes from 35 commits
Commits
Show all changes
39 commits
Select commit Hold shift + click to select a range
182c23b
feat: indexed test case results
TeddyCr Sep 3, 2024
2cac2af
feat: added indexation logic for test case results
TeddyCr Sep 4, 2024
bba9863
style: ran java linting
TeddyCr Sep 4, 2024
2867b9a
Merge remote-tracking branch 'upstream/main' into GEN-1213
TeddyCr Sep 4, 2024
6d4f11c
fix: IDE warnings
TeddyCr Sep 4, 2024
396b37f
chore: added test case results migration
TeddyCr Sep 4, 2024
121d91e
Merge remote-tracking branch 'upstream/main' into GEN-1213
TeddyCr Sep 4, 2024
1fd5bc9
style: ran java linting
TeddyCr Sep 4, 2024
fb0f43d
fix: postgres migration column json ref
TeddyCr Sep 5, 2024
7086ea0
Merge branch 'main' into GEN-1213
TeddyCr Sep 5, 2024
c728578
empty commit to trigger queued
TeddyCr Sep 5, 2024
3b1cc4c
Merge remote-tracking branch 'upstream/main' into GEN-1213
TeddyCr Sep 5, 2024
87c366d
Merge remote-tracking branch 'upstream/main' into GEN-1213
TeddyCr Sep 5, 2024
930d039
Merge remote-tracking branch 'upstream/main' into GEN-1213
TeddyCr Sep 6, 2024
4381155
Merge remote-tracking branch 'upstream/main' into GEN-1213
TeddyCr Sep 6, 2024
72f4a2c
Merge remote-tracking branch 'upstream/main' into GEN-1192
TeddyCr Sep 6, 2024
ae8fe82
Merge remote-tracking branch 'upstream/main' into GEN-1192
TeddyCr Sep 9, 2024
acbfb63
Merge remote-tracking branch 'upstream/main' into GEN-1192
TeddyCr Sep 10, 2024
b111f4e
Merge remote-tracking branch 'upstream/main' into GEN-1192
TeddyCr Sep 10, 2024
527e059
Merge remote-tracking branch 'upstream/main' into GEN-1192
TeddyCr Sep 10, 2024
08ec447
Merge remote-tracking branch 'upstream/main' into GEN-1192
TeddyCr Sep 11, 2024
75ea6ae
chore: extracted test case results to its own resource
TeddyCr Sep 12, 2024
695953d
Merge remote-tracking branch 'upstream/main' into GEN-1192
TeddyCr Sep 12, 2024
24b9b60
Merge remote-tracking branch 'upstream/main' into GEN-1192
TeddyCr Sep 13, 2024
52e8169
chore: fix failing tests
TeddyCr Sep 13, 2024
ee462b9
Merge remote-tracking branch 'upstream/main' into GEN-1192
TeddyCr Sep 13, 2024
6cecfe1
chore: move testCaseResult state from testSuite and testCase to dynam…
TeddyCr Sep 16, 2024
cb63b65
Merge remote-tracking branch 'upstream/main' into GEN-1192
TeddyCr Sep 16, 2024
27417e1
chore: clean up test case repository
TeddyCr Sep 16, 2024
c7136e6
style: ran java linting
TeddyCr Sep 16, 2024
b92ef8a
chore: removed testCaseResultSummary and testCaseResult state from db
TeddyCr Sep 16, 2024
abfc1c0
Merge remote-tracking branch 'upstream/main' into GEN-1192
TeddyCr Sep 16, 2024
e11a2f0
Merge remote-tracking branch 'upstream/main' into GEN-1192
TeddyCr Sep 16, 2024
1aae6c5
fix: test failures
TeddyCr Sep 17, 2024
52788fe
Merge remote-tracking branch 'upstream/main' into GEN-1192
TeddyCr Sep 17, 2024
092dc50
Merge remote-tracking branch 'upstream/main' into GEN-1192
TeddyCr Sep 17, 2024
ba3e5cb
chore: fix index mapping type for result value
TeddyCr Sep 17, 2024
ee0b57e
chore: fix test failure
TeddyCr Sep 17, 2024
8b51dfe
Merge remote-tracking branch 'upstream/main' into GEN-1192
TeddyCr Sep 17, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,18 @@ ADD COLUMN id VARCHAR(36) GENERATED ALWAYS AS (json ->> '$.id') STORED NOT NULL,
ADD CONSTRAINT UNIQUE (id);

-- Create index on id column
CREATE INDEX data_quality_data_time_series_id_index ON data_quality_data_time_series (id);
CREATE INDEX data_quality_data_time_series_id_index ON data_quality_data_time_series (id);

-- Remove VIRTUAL status column from test_case table and remove
-- testCaseResult state from testCase; fetch from search repo.
ALTER TABLE test_case DROP COLUMN status;
-- Copy the status out of the embedded testCaseResult before it is removed below.
-- Guard against rows without a testCaseResult: JSON_SET returns NULL when any
-- argument is NULL, which would wipe the whole json column for those rows.
UPDATE test_case
SET json = JSON_SET(json, '$.testCaseStatus', JSON_EXTRACT(json, '$.testCaseResult.testCaseStatus'))
WHERE JSON_EXTRACT(json, '$.testCaseResult.testCaseStatus') IS NOT NULL;
ALTER TABLE test_case ADD COLUMN status VARCHAR(56) GENERATED ALWAYS AS (JSON_UNQUOTE(JSON_EXTRACT(json, '$.testCaseStatus'))) STORED;


-- Remove test case result states now that the status has been copied out.
UPDATE test_suite
SET json = JSON_REMOVE(json, '$.testCaseResultSummary');

UPDATE test_case
SET json = JSON_REMOVE(json, '$.testCaseResult');
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ SET json = jsonb_set(
FROM test_case tc
WHERE dqdts.entityfqnHash = tc.fqnHash;


-- Add id column to data_quality_data_time_series table
-- after we have added the id values to the records
ALTER TABLE data_quality_data_time_series
Expand All @@ -15,3 +16,17 @@ ADD CONSTRAINT id_unique UNIQUE (id);

-- Create index on id column
CREATE INDEX IF NOT EXISTS data_quality_data_time_series_id_index ON data_quality_data_time_series (id);

-- Remove VIRTUAL status column from test_case table and remove
-- testCaseResult state from testCase; fetch from search repo.
ALTER TABLE test_case DROP COLUMN status;
-- Copy the status out of the embedded testCaseResult before it is removed below.
-- Guard against rows without a testCaseResult: jsonb_set returns NULL when the
-- new value is NULL, which would wipe the whole json column for those rows.
UPDATE test_case
SET json = jsonb_set(json, '{testCaseStatus}', json->'testCaseResult'->'testCaseStatus')
WHERE json->'testCaseResult'->'testCaseStatus' IS NOT NULL;
ALTER TABLE test_case ADD COLUMN status VARCHAR(56) GENERATED ALWAYS AS (json ->> 'testCaseStatus') STORED NULL;


-- Remove test case result states now that the status has been copied out.
UPDATE test_suite
SET json = json - 'testCaseResultSummary';

UPDATE test_case
SET json = json - 'testCaseResult';
19 changes: 19 additions & 0 deletions ingestion/tests/integration/data_quality/test_data_diff.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import sys
from datetime import datetime

import pytest
from pydantic import BaseModel
Expand Down Expand Up @@ -67,6 +68,7 @@ def __init__(self, *args, **kwargs):
),
"POSTGRES_SERVICE.dvdrental.public.customer",
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Success,
failedRows=0,
passedRows=599,
Expand All @@ -85,6 +87,7 @@ def __init__(self, *args, **kwargs):
),
"POSTGRES_SERVICE.dvdrental.public.changed_customer",
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Failed,
failedRows=321,
passedRows=278,
Expand All @@ -99,6 +102,7 @@ def __init__(self, *args, **kwargs):
),
"POSTGRES_SERVICE.dvdrental.public.changed_customer",
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Failed,
failedRows=321,
passedRows=278,
Expand All @@ -114,6 +118,7 @@ def __init__(self, *args, **kwargs):
),
"POSTGRES_SERVICE.dvdrental.public.changed_customer",
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Success,
failedRows=321,
),
Expand All @@ -128,6 +133,7 @@ def __init__(self, *args, **kwargs):
),
"POSTGRES_SERVICE.dvdrental.public.changed_customer",
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Failed,
failedRows=321,
),
Expand All @@ -146,6 +152,7 @@ def __init__(self, *args, **kwargs):
),
"POSTGRES_SERVICE.dvdrental.public.changed_customer",
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Success,
),
),
Expand All @@ -158,6 +165,7 @@ def __init__(self, *args, **kwargs):
),
"POSTGRES_SERVICE.dvdrental.public.customer_without_first_name",
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Failed,
testResultValue=[
TestResultValue(name="removedColumns", value="1"),
Expand All @@ -179,6 +187,7 @@ def __init__(self, *args, **kwargs):
),
"POSTGRES_SERVICE.dvdrental.public.customer_without_first_name",
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Success,
),
),
Expand Down Expand Up @@ -208,6 +217,7 @@ def __init__(self, *args, **kwargs):
),
"MYSQL_SERVICE.default.test.customer",
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Success,
),
),
Expand All @@ -220,6 +230,7 @@ def __init__(self, *args, **kwargs):
),
"MYSQL_SERVICE.default.test.changed_customer",
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Failed,
),
),
Expand Down Expand Up @@ -290,6 +301,9 @@ def test_happy_paths(
fields=["*"],
)
assert "ERROR: Unexpected error" not in test_case_entity.testCaseResult.result
parameters.expected.timestamp = (
test_case_entity.testCaseResult.timestamp
) # timestamp is not deterministic
assert_equal_pydantic_objects(parameters.expected, test_case_entity.testCaseResult)


Expand All @@ -313,6 +327,7 @@ def test_happy_paths(
],
),
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Aborted,
result="Unsupported dialect in param table2.serviceUrl: mongodb",
),
Expand All @@ -331,6 +346,7 @@ def test_happy_paths(
],
),
TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Failed,
result="Tables have 1 different columns:"
"\n Changed columns:"
Expand Down Expand Up @@ -405,6 +421,9 @@ def test_error_paths(
test_case_entity: TestCase = metadata.get_or_create_test_case(
f"{table1.fullyQualifiedName.root}.{parameters.name}"
)
expected.timestamp = (
test_case_entity.testCaseResult.timestamp
) # timestamp is not deterministic
assert_equal_pydantic_objects(expected, test_case_entity.testCaseResult)


Expand Down
8 changes: 8 additions & 0 deletions ingestion/tests/integration/mysql/test_data_quality.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,7 @@ class TestColumnParameter:
],
),
expected_result=TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Failed,
),
),
Expand All @@ -86,6 +87,7 @@ class TestColumnParameter:
],
),
expected_result=TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Failed,
),
),
Expand All @@ -101,6 +103,7 @@ class TestColumnParameter:
],
),
expected_result=TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Failed,
),
),
Expand All @@ -116,6 +119,7 @@ class TestColumnParameter:
],
),
expected_result=TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Success,
),
),
Expand All @@ -131,6 +135,7 @@ class TestColumnParameter:
],
),
expected_result=TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Success,
),
),
Expand All @@ -144,6 +149,7 @@ class TestColumnParameter:
parameterValues=[],
),
expected_result=TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Success,
),
),
Expand All @@ -162,6 +168,7 @@ class TestColumnParameter:
],
),
expected_result=TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Success,
),
),
Expand All @@ -180,6 +187,7 @@ class TestColumnParameter:
],
),
expected_result=TestCaseResult(
timestamp=int(datetime.now().timestamp() * 1000),
testCaseStatus=TestCaseStatus.Failed,
),
),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -124,6 +124,8 @@ public final class Entity {

public static final String FIELD_DISABLED = "disabled";

public static final String FIELD_TEST_SUITES = "testSuites";

//
// Service entities
//
Expand Down Expand Up @@ -215,7 +217,7 @@ public final class Entity {
// Time series entities
public static final String ENTITY_REPORT_DATA = "entityReportData";
public static final String TEST_CASE_RESOLUTION_STATUS = "testCaseResolutionStatus";
public static final String TEST_CASE_RESULTS = "testCaseResult";
public static final String TEST_CASE_RESULT = "testCaseResult";
public static final String WEB_ANALYTIC_ENTITY_VIEW_REPORT_DATA =
"webAnalyticEntityViewReportData";
public static final String WEB_ANALYTIC_USER_ACTIVITY_REPORT_DATA =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -175,6 +175,9 @@ public interface CollectionDAO {
@CreateSqlObject
TestCaseResolutionStatusTimeSeriesDAO testCaseResolutionStatusTimeSeriesDao();

@CreateSqlObject
TestCaseResultTimeSeriesDAO testCaseResultTimeSeriesDao();

@CreateSqlObject
RoleDAO roleDAO();

Expand Down Expand Up @@ -4431,6 +4434,47 @@ default List<String> listWithOffset(
}
}

/**
 * Time-series DAO for test case results, persisted in the shared
 * data_quality_data_time_series table.
 */
interface TestCaseResultTimeSeriesDAO extends EntityTimeSeriesDAO {
  @Override
  default String getTimeSeriesTableName() {
    return "data_quality_data_time_series";
  }

  // Use the <table> define so the bound table name is actually applied.
  // Previously the @Define("table") parameter was declared but the SQL
  // hard-coded the table name, silently ignoring the value passed from
  // getTimeSeriesTableName() by the default insert(...) below.
  @ConnectionAwareSqlUpdate(
      value =
          "INSERT INTO <table>(entityFQNHash, extension, jsonSchema, json, incidentId) "
              + "VALUES (:testCaseFQNHash, :extension, :jsonSchema, :json, :incidentStateId)",
      connectionType = MYSQL)
  @ConnectionAwareSqlUpdate(
      value =
          "INSERT INTO <table>(entityFQNHash, extension, jsonSchema, json, incidentId) "
              + "VALUES (:testCaseFQNHash, :extension, :jsonSchema, (:json :: jsonb), :incidentStateId)",
      connectionType = POSTGRES)
  void insert(
      @Define("table") String table,
      @BindFQN("testCaseFQNHash") String testCaseFQNHash,
      @Bind("extension") String extension,
      @Bind("jsonSchema") String jsonSchema,
      @Bind("json") String json,
      @Bind("incidentStateId") String incidentStateId);

  /**
   * Inserts a test case result row.
   *
   * @param testCaseFQN fully qualified name of the test case; hashed via @BindFQN
   * @param extension time-series extension discriminator
   * @param jsonSchema schema name of the stored json payload
   * @param json serialized test case result payload
   * @param incidentStateId optional incident linked to the result; stored as NULL when absent
   */
  default void insert(
      String testCaseFQN,
      String extension,
      String jsonSchema,
      String json,
      UUID incidentStateId) {
    insert(
        getTimeSeriesTableName(),
        testCaseFQN,
        extension,
        jsonSchema,
        json,
        // Avoid NPE on toString: incidents are optional for a result.
        incidentStateId != null ? incidentStateId.toString() : null);
  }
}

class EntitiesCountRowMapper implements RowMapper<EntitiesCount> {
@Override
public EntitiesCount map(ResultSet rs, StatementContext ctx) throws SQLException {
Expand Down
Loading
Loading