Add udf support to run/test #851

Merged
merged 5 commits into from
Oct 6, 2024
Merged
14 changes: 7 additions & 7 deletions .github/workflows/build-config.yml
@@ -33,14 +33,14 @@ jobs:
- name: Run Docker container and execute tests
working-directory: ./sqrl-testing/sqrl-integration-tests/src/test/resources/usecases/conference
run: |
docker run -i -p 8888:8888 -p 8081:8081 --rm -v $PWD:/build sqrl-test test conference.sqrl conference.graphqls --snapshot snapshots-conference --tests tests-conference
docker run -i --rm -v $PWD:/build sqrl-test test conference.sqrl conference.graphqls --snapshot snapshots-conference --tests tests-conference
continue-on-error: false

- name: Test UDF
working-directory: ./sqrl-testing/sqrl-integration-tests/src/test/resources/usecases/udf
run: |
docker run -i --rm -v $PWD:/build sqrl-test test myudf.sqrl --snapshot snapshots-myudf --tests tests-myudf
continue-on-error: false
#
# - name: Run Docker container and execute tests
# working-directory: ./sqrl-testing/sqrl-integration-tests/src/test/resources/usecases/duckdb
# run: |
# docker run -i -p 8888:8888 -p 8081:8081 --rm -v $PWD:/build sqrl-test test duckdb.sqrl --snapshot snapshots-duckdb --tests tests-duckdb
# continue-on-error: false

- name: Check Docker return code
run: |
@@ -161,9 +161,9 @@ public void testUseCase(UseCaseTestParameter param) {

Map<String, String> env = new HashMap<>();
env.putAll(System.getenv());
env.put("EXECUTION_MODE", "local");
env.putAll(containerHook.getEnv());
env.put("DATA_PATH", rootDir.resolve("build/deploy/flink/data").toAbsolutePath().toString());
env.put("UDF_PATH", rootDir.resolve("build/deploy/flink/lib").toAbsolutePath().toString());

//Run the test
TestEnvContext context = TestEnvContext.builder()
@@ -229,7 +229,7 @@ public void testUseCase(UseCaseTestParameter param) {
@MethodSource("useCaseProvider")
@Disabled
public void runTestNumber(UseCaseTestParameter param) {
int i = -1;
int i = 31;
testNo++;
System.out.println(testNo + ":" + param);
if (i == testNo) {

This file was deleted.

@@ -0,0 +1,125 @@
>>>pipeline_explain.txt
=== MyTable
ID: mytable_1
Type: state
Stage: flink
Primary Key: val
Timestamp : -
Schema:
- val: INTEGER NOT NULL
- myFnc: BIGINT
Plan:
LogicalProject(val=[$0], myFnc=[MyScalarFunction(CAST($0):BIGINT, CAST($0):BIGINT)])
LogicalValues(tuples=[[{ 1 }, { 2 }, { 3 }, { 4 }, { 5 }, { 6 }, { 7 }, { 8 }, { 9 }, { 10 }]])

>>>flink.json
{
"flinkSql" : [
"CREATE TEMPORARY FUNCTION IF NOT EXISTS `myscalarfunction` AS 'com.myudf.MyScalarFunction' LANGUAGE JAVA;",
"CREATE TEMPORARY TABLE `mytable_1` (\n `val` INTEGER NOT NULL,\n `myFnc` BIGINT,\n PRIMARY KEY (`val`) NOT ENFORCED\n) WITH (\n 'password' = '${JDBC_PASSWORD}',\n 'connector' = 'jdbc-sqrl',\n 'driver' = 'org.postgresql.Driver',\n 'table-name' = 'mytable_1',\n 'url' = '${JDBC_URL}',\n 'username' = '${JDBC_USERNAME}'\n);",
"CREATE VIEW `table$1`\nAS\nSELECT `val`, MYSCALARFUNCTION(CAST(`val` AS BIGINT), CAST(`val` AS BIGINT)) AS `myFnc`\nFROM (VALUES (1),\n (2),\n (3),\n (4),\n (5),\n (6),\n (7),\n (8),\n (9),\n (10)) AS `t` (`val`);",
"EXECUTE STATEMENT SET BEGIN\nINSERT INTO `mytable_1`\n(SELECT *\n FROM `table$1`)\n;\nEND;"
],
"connectors" : [
"jdbc-sqrl"
],
"formats" : [ ]
}
>>>kafka.json
{
"topics" : [ ]
}
>>>postgres.json
{
"ddl" : [
{
"name" : "mytable_1",
"columns" : [
"\"val\" INTEGER NOT NULL",
"\"myFnc\" BIGINT "
],
"primaryKeys" : [
"\"val\""
],
"sql" : "CREATE TABLE IF NOT EXISTS mytable_1 (\"val\" INTEGER NOT NULL,\"myFnc\" BIGINT , PRIMARY KEY (\"val\"));"
}
],
"views" : [
{
"name" : "MyTable",
"sql" : "CREATE OR REPLACE VIEW \"MyTable\"(\"val\", \"myFnc\") AS SELECT *\nFROM \"mytable_1\"\nORDER BY \"val\";"
}
]
}
>>>vertx.json
{
"model" : {
"coords" : [
{
"type" : "args",
"parentType" : "Query",
"fieldName" : "MyTable",
"matchs" : [
{
"arguments" : [
{
"type" : "variable",
"type" : "variable",
"path" : "val"
},
{
"type" : "variable",
"type" : "variable",
"path" : "limit"
},
{
"type" : "variable",
"type" : "variable",
"path" : "offset"
}
],
"query" : {
"type" : "PagedJdbcQuery",
"type" : "PagedJdbcQuery",
"sql" : "SELECT *\nFROM \"mytable_1\"\nWHERE \"val\" = $1",
"parameters" : [
{
"type" : "arg",
"type" : "arg",
"path" : "val"
}
]
}
},
{
"arguments" : [
{
"type" : "variable",
"type" : "variable",
"path" : "limit"
},
{
"type" : "variable",
"type" : "variable",
"path" : "offset"
}
],
"query" : {
"type" : "PagedJdbcQuery",
"type" : "PagedJdbcQuery",
"sql" : "SELECT *\nFROM \"mytable_1\"\nORDER BY \"val\"",
"parameters" : [ ]
}
}
]
}
],
"mutations" : [ ],
"subscriptions" : [ ],
"schema" : {
"type" : "string",
"type" : "string",
"schema" : "\"An RFC-3339 compliant DateTime Scalar\"\nscalar DateTime\n\ntype MyTable {\n val: Int!\n myFnc: Float\n}\n\ntype Query {\n MyTable(val: Int, limit: Int = 10, offset: Int = 0): [MyTable!]\n}\n"
}
}
}
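
The snapshot above registers com.myudf.MyScalarFunction as a Flink scalar UDF (CREATE TEMPORARY FUNCTION ... LANGUAGE JAVA), casts both arguments to BIGINT, and the expected query results further down in the diff show myFnc == 2 * val. A minimal sketch of what such a UDF could look like follows; this is an assumption about the class for illustration, not the actual code shipped in the test jar:

package com.myudf;

import org.apache.flink.table.functions.ScalarFunction;

// Hypothetical sketch of the UDF referenced by
// CREATE TEMPORARY FUNCTION ... AS 'com.myudf.MyScalarFunction' LANGUAGE JAVA;
// the real class lives in the (binary) test jar and is not part of this diff.
public class MyScalarFunction extends ScalarFunction {

  // Flink resolves scalar UDF calls against public eval methods.
  // The plan casts both arguments to BIGINT, so eval takes two Longs.
  public Long eval(Long a, Long b) {
    return a + b; // consistent with the snapshots, where myFnc == 2 * val
  }
}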
@@ -0,0 +1,2 @@
>>>MyTable.graphql
{"data":{"MyTable":[{"val":1,"myFnc":2.0},{"val":2,"myFnc":4.0},{"val":3,"myFnc":6.0},{"val":4,"myFnc":8.0},{"val":5,"myFnc":10.0},{"val":6,"myFnc":12.0},{"val":7,"myFnc":14.0},{"val":8,"myFnc":16.0},{"val":9,"myFnc":18.0},{"val":10,"myFnc":20.0}]}}
Binary file not shown.
@@ -0,0 +1 @@
{"data":{"MyTable":[{"val":1,"myFnc":2.0},{"val":2,"myFnc":4.0},{"val":3,"myFnc":6.0},{"val":4,"myFnc":8.0},{"val":5,"myFnc":10.0},{"val":6,"myFnc":12.0},{"val":7,"myFnc":14.0},{"val":8,"myFnc":16.0},{"val":9,"myFnc":18.0},{"val":10,"myFnc":20.0}]}}
@@ -0,0 +1,6 @@
query MyTable {
MyTable {
val
myFnc
}
}
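
The test runner sends this query to the served GraphQL API and compares the response against the MyTable.graphql snapshot above. As a rough illustration of that protocol (the /graphql path and port 8888 are assumptions based on the ports previously exposed in CI, not something this PR specifies), the same query could be issued manually like this:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

// Hypothetical manual check of the MyTable query against a running pipeline.
public class GraphqlQueryExample {
  public static void main(String[] args) throws Exception {
    String body = "{\"query\":\"query MyTable { MyTable { val myFnc } }\"}";
    HttpRequest request = HttpRequest.newBuilder(URI.create("http://localhost:8888/graphql"))
        .header("Content-Type", "application/json")
        .POST(HttpRequest.BodyPublishers.ofString(body))
        .build();
    HttpResponse<String> response = HttpClient.newHttpClient()
        .send(request, HttpResponse.BodyHandlers.ofString());
    // Expected to match the snapshot: {"data":{"MyTable":[{"val":1,"myFnc":2.0}, ...]}}
    System.out.println(response.body());
  }
}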
1 change: 1 addition & 0 deletions sqrl-tools/dockerrun.sh
@@ -4,6 +4,7 @@ cd /build

# Todo: there is a target flag we need to parse and set
export DATA_PATH=/build/build/deploy/flink/data
export UDF_PATH=/build/build/deploy/flink/lib

echo 'Compiling...this takes about 10 seconds'
java -jar /opt/sqrl/sqrl-cli.jar ${@}
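
dockerrun.sh points UDF_PATH at /build/build/deploy/flink/lib, presumably the directory where packaged UDF jars end up during the build. How the engine actually consumes that directory is not shown in this diff; the sketch below only illustrates the general mechanism of handing such jars to Flink and registering a function by class name. The pipeline.jars option and the Table API calls are standard Flink, but their use here is an assumption, not sqrl's actual wiring:

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

// Hypothetical illustration of consuming UDF_PATH: list the jars, pass them to
// Flink via the standard 'pipeline.jars' option, then register the function.
public class UdfPathExample {
  public static void main(String[] args) throws Exception {
    Path udfDir = Paths.get(System.getenv("UDF_PATH")); // e.g. /build/build/deploy/flink/lib
    String jars;
    try (Stream<Path> files = Files.list(udfDir)) {
      jars = files.filter(p -> p.toString().endsWith(".jar"))
          .map(p -> p.toUri().toString())
          .collect(Collectors.joining(";")); // Flink list options are ';'-separated
    }
    Configuration conf = new Configuration();
    conf.setString("pipeline.jars", jars);
    TableEnvironment tEnv = TableEnvironment.create(
        EnvironmentSettings.newInstance().withConfiguration(conf).build());
    tEnv.executeSql("CREATE TEMPORARY FUNCTION IF NOT EXISTS myscalarfunction"
        + " AS 'com.myudf.MyScalarFunction' LANGUAGE JAVA");
  }
}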