Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

ci: speed up ci by introducing precompiled slt binary #2363

Merged
merged 29 commits into from
Jan 8, 2024
Merged
Show file tree
Hide file tree
Changes from 22 commits
Commits
Show all changes
29 commits
Select commit Hold shift + click to select a range
9bde203
ci: attempt to speed up ci by precompiling slt binary
universalmind303 Jan 5, 2024
2cf7dcd
don't get from cache for ~/.cargo
universalmind303 Jan 5, 2024
582e0d2
wip: ci stuff
universalmind303 Jan 5, 2024
f23aa8e
wip: ci stuff
universalmind303 Jan 5, 2024
85e2860
wip: ci stuff
universalmind303 Jan 5, 2024
78c51df
wip: ci stuff
universalmind303 Jan 5, 2024
c21c1de
wip: multiple caches?
universalmind303 Jan 5, 2024
989d1b8
include less stuff when hydrating cache
universalmind303 Jan 5, 2024
8669e4a
more caching optimizations
universalmind303 Jan 5, 2024
a25734b
more caching optimizations
universalmind303 Jan 5, 2024
bd86a50
more optimizations
universalmind303 Jan 5, 2024
c1c2e03
more optimizations
universalmind303 Jan 6, 2024
1a5d9b3
add sqlserver back in
universalmind303 Jan 6, 2024
99117f6
pull out mongo and mysql
universalmind303 Jan 6, 2024
419c0bf
use a matrix for the slt datasource integrations
universalmind303 Jan 6, 2024
c9810ba
fix matrix
universalmind303 Jan 6, 2024
5375aeb
fix matrix
universalmind303 Jan 6, 2024
bbd3839
put tunnel tests behind pg flag
universalmind303 Jan 6, 2024
76ab5d8
dont run tunnels on non pg protocol
universalmind303 Jan 6, 2024
985361f
whoops
universalmind303 Jan 6, 2024
d5368e5
Merge branch 'main' of github.com:GlareDB/glaredb into universalmind3…
universalmind303 Jan 8, 2024
c2e4deb
pr feedback
universalmind303 Jan 8, 2024
7ebac46
revert back to relative paths & drop cargo test semantics
universalmind303 Jan 8, 2024
7aaeaeb
Merge branch 'main' into universalmind303/slt-ci
universalmind303 Jan 8, 2024
8ac7d42
fix paths
universalmind303 Jan 8, 2024
fd440db
remove test from "testing" crate
universalmind303 Jan 8, 2024
1bb74c0
Merge branch 'main' of github.com:GlareDB/glaredb into universalmind3…
universalmind303 Jan 8, 2024
956e853
fix paths
universalmind303 Jan 8, 2024
7271ba5
Merge branch 'main' into universalmind303/slt-ci
universalmind303 Jan 8, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
270 changes: 189 additions & 81 deletions .github/workflows/ci.yaml

Large diffs are not rendered by default.

1 change: 0 additions & 1 deletion crates/datasources/src/bson/builder.rs
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,6 @@ impl RecordStructBuilder {
.field_index
.get(key)
.ok_or_else(|| BsonError::ColumnNotInInferredSchema(key.to_string()))?;
println!("{}->{}", key, idx);

if *cols_set.get(idx).unwrap() {
continue;
Expand Down
4 changes: 4 additions & 0 deletions crates/testing/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -35,3 +35,7 @@ rpcsrv = { path = "../rpcsrv" }
harness = false
name = "sqllogictests"
path = "tests/sqllogictests/main.rs"

[[bin]]
name = "slt"
path = "src/main.rs"
20 changes: 20 additions & 0 deletions crates/testing/src/main.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
use std::sync::Arc;

use testing::slt::{
hooks::{AllTestsHook, SshTunnelHook},
runner::SltRunner,
tests::{PgBinaryEncoding, SshKeysTest},
};

/// Entry point for the standalone `slt` runner binary.
///
/// Runs the SQL logic tests with a precompiled binary so CI can cache
/// and reuse it instead of rebuilding the whole `cargo test` harness.
/// Keep the test/hook registrations in sync with the
/// `tests/sqllogictests` harness entry point.
///
/// Note: `pub` is dropped — visibility on a binary crate's `main` is
/// meaningless, and the sibling harness uses plain `fn main`.
fn main() -> anyhow::Result<()> {
    SltRunner::new()
        // Root directory that the `.slt` files are discovered under.
        .test_files_dir("testdata")?
        // Rust-defined tests (not driven by .slt files).
        .test("sqllogictests/ssh_keys", Box::new(SshKeysTest))?
        .test("pgproto/binary_encoding", Box::new(PgBinaryEncoding))?
        // Hook applied to every test; sets per-test local variables.
        .hook("*", Arc::new(AllTestsHook))?
        // Hook that provisions SSH tunnels for the tunnel tests only.
        .hook("*/tunnels/ssh", Arc::new(SshTunnelHook))?
        .run()
}
4 changes: 3 additions & 1 deletion crates/testing/src/slt.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
//! Utility to run SQL Logic Tests.

mod cli;
pub mod cli;
pub mod hooks;
pub mod runner;
mod test;
pub mod tests;
3 changes: 2 additions & 1 deletion crates/testing/src/slt/cli.rs
Original file line number Diff line number Diff line change
Expand Up @@ -145,7 +145,8 @@ impl Cli {
let mut tests: Vec<_> = if let Some(patterns) = &self.tests_pattern {
let patterns = patterns
.iter()
.map(|p| glob::Pattern::new(p))
.map(|p| p.trim_end_matches(".slt"))
universalmind303 marked this conversation as resolved.
Show resolved Hide resolved
.map(glob::Pattern::new)
.collect::<Result<Vec<_>, _>>()?;

tests
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@ use std::{collections::HashMap, time::Duration};

use anyhow::{anyhow, Result};
use async_trait::async_trait;
use testing::slt::runner::{Hook, TestClient};
use tokio::{
net::TcpListener,
process::Command,
Expand All @@ -11,6 +10,8 @@ use tokio::{
use tokio_postgres::{Client, Config};
use tracing::warn;

use super::test::{Hook, TestClient};

/// This [`Hook`] is used to set some local variables that might change for
/// each test.
pub struct AllTestsHook;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,11 @@ use std::collections::HashMap;

use anyhow::{anyhow, Result};
use async_trait::async_trait;
use testing::slt::runner::{FnTest, TestClient};
use tokio_postgres::Config;
use tracing::warn;

use super::test::{FnTest, TestClient};

macro_rules! test_assert {
($e:expr, $err:expr) => {
if !($e) {
Expand Down
11 changes: 5 additions & 6 deletions crates/testing/tests/sqllogictests/main.rs
Original file line number Diff line number Diff line change
@@ -1,11 +1,10 @@
mod hooks;
mod tests;

use anyhow::Result;
use hooks::{AllTestsHook, SshTunnelHook};
use std::sync::Arc;
use testing::slt::runner::SltRunner;
use tests::{PgBinaryEncoding, SshKeysTest};
use testing::slt::{
hooks::{AllTestsHook, SshTunnelHook},
runner::SltRunner,
tests::{PgBinaryEncoding, SshKeysTest},
};

fn main() -> Result<()> {
SltRunner::new()
Expand Down
12 changes: 10 additions & 2 deletions justfile
Original file line number Diff line number Diff line change
Expand Up @@ -67,8 +67,8 @@ sql-logic-tests *args: protoc
just test --test sqllogictests -- {{args}}

# Run SQL Logic Tests over RPC
rpc-tests: protoc
just sql-logic-tests --protocol=rpc \
rpc-tests:
just slt-bin --protocol=rpc \
'sqllogictests/cast/*' \
'sqllogictests/cte/*' \
'sqllogictests/functions/delta_scan' \
Expand Down Expand Up @@ -113,6 +113,14 @@ rpc-tests: protoc
'sqllogictests/describe_rpc' \
'sqllogictests/allowed_operations'

# Build a pre-compiled slt runner.
# Extra args are passed to cargo itself (e.g. `just build-slt --release`);
# `cargo build` takes no trailing `--` args, so the separator is dropped.
build-slt *args:
    cargo build --bin slt {{args}}

# Run SQL Logic Tests with a pre-compiled slt runner
slt-bin *args:
universalmind303 marked this conversation as resolved.
Show resolved Hide resolved
./target/debug/slt {{args}}

# Check formatting.
fmt-check: protoc
cargo fmt --check
Expand Down
4 changes: 2 additions & 2 deletions testdata/sqllogictests/catalog/functions.slt
Original file line number Diff line number Diff line change
Expand Up @@ -54,5 +54,5 @@ select
from glare_catalog.functions
where function_name = 'read_parquet';
----
read_parquet table Utf8/List<Utf8> t SELECT * FROM read_parquet('./my_data.parquet') Returns a table by scanning the given Parquet file(s).
read_parquet table Utf8/List<Utf8> t SELECT * FROM read_parquet('./my_data.parquet') Returns a table by scanning the given Parquet file(s).
read_parquet table Utf8/List<Utf8> t SELECT * FROM read_parquet('./my_data.parquet') Returns a table by scanning the given Parquet file(s).
read_parquet table Utf8/List<Utf8> t SELECT * FROM read_parquet('./my_data.parquet') Returns a table by scanning the given Parquet file(s).
universalmind303 marked this conversation as resolved.
Show resolved Hide resolved
10 changes: 5 additions & 5 deletions testdata/sqllogictests/create_table.slt
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ select * from ctas1;
1

statement ok
create table ctas2 as select * from '../../testdata/parquet/userdata1.parquet';
create table ctas2 as select * from '${PWD}/testdata/parquet/userdata1.parquet';
universalmind303 marked this conversation as resolved.
Show resolved Hide resolved

query I
select id from ctas2 order by id limit 1;
Expand Down Expand Up @@ -82,16 +82,16 @@ select count(*) from glare_catalog.tables where builtin = false and table_name =

#2034 case sensitive table names
statement ok
create table case_sensitive as select * from '../../testdata/csv/case_sensitive_columns.csv';
create table case_sensitive as select * from '${PWD}/testdata/csv/case_sensitive_columns.csv';

statement error Duplicate name: case_sensitive
create table "case_sensitive" as select * from '../../testdata/csv/case_sensitive_columns.csv';
create table "case_sensitive" as select * from '${PWD}/testdata/csv/case_sensitive_columns.csv';

statement ok
create table "Case_Sensitive" as select * from '../../testdata/csv/case_sensitive_columns.csv';
create table "Case_Sensitive" as select * from '${PWD}/testdata/csv/case_sensitive_columns.csv';

statement ok
create table "Case Sensitive" as select * from '../../testdata/csv/case_sensitive_columns.csv';
create table "Case Sensitive" as select * from '${PWD}/testdata/csv/case_sensitive_columns.csv';

query I rowsort
select name from case_sensitive
Expand Down
6 changes: 3 additions & 3 deletions testdata/sqllogictests/csv.slt
Original file line number Diff line number Diff line change
Expand Up @@ -23,20 +23,20 @@ select count(*), status from bikeshare_stations group by status order by status;
# Empty column name (#1750)

query ITT rowsort
select * from '../../testdata/csv/empty_col.csv'
select * from '${PWD}/testdata/csv/empty_col.csv'
----
0 a hello
1 b world

query T rowsort
select col1 from '../../testdata/csv/empty_col.csv'
select col1 from '${PWD}/testdata/csv/empty_col.csv'
----
a
b

# Weird, but it works
query T rowsort
select "" from '../../testdata/csv/empty_col.csv'
select "" from '${PWD}/testdata/csv/empty_col.csv'
----
0
1
2 changes: 1 addition & 1 deletion testdata/sqllogictests/external_table.slt
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ statement ok
CREATE OR REPLACE EXTERNAL TABLE T1 FROM DEBUG OPTIONS (TABLE_TYPE = 'never_ending');

statement ok
CrEaTe ExTeRnAl TaBlE if not exists SuppLIER fRoM lOcAl (LoCaTiOn '../../testdata/parquet/userdata1.parquet');
CrEaTe ExTeRnAl TaBlE if not exists SuppLIER fRoM lOcAl (LoCaTiOn '${PWD}/testdata/parquet/userdata1.parquet');

statement ok
drop table supplier;
Expand Down
2 changes: 1 addition & 1 deletion testdata/sqllogictests/functions/csv_scan.slt
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ select count(*) from csv_scan(

# Relative path
query I
select count(*) from csv_scan('../../testdata/sqllogictests_datasources_common/data/bikeshare_stations.csv')
select count(*) from csv_scan('./testdata/sqllogictests_datasources_common/data/bikeshare_stations.csv')
----
102

Expand Down
2 changes: 1 addition & 1 deletion testdata/sqllogictests/functions/delta_scan.slt
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ select * from delta_scan('file://${PWD}/testdata/delta/table1') order by a;

# Relative path
query IT
select * from delta_scan('../../testdata/delta/table1/') order by a;
select * from delta_scan('./testdata/delta/table1/') order by a;
----
1 hello
2 world
Expand Down
2 changes: 1 addition & 1 deletion testdata/sqllogictests/functions/json_scan.slt
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ select count(*) from ndjson_scan('file://${PWD}/testdata/sqllogictests_datasourc

# # Relative path
query I
select count(*) from ndjson_scan('../../testdata/sqllogictests_datasources_common/data/bikeshare_stations.ndjson')
select count(*) from ndjson_scan('./testdata/sqllogictests_datasources_common/data/bikeshare_stations.ndjson')
----
102

Expand Down
2 changes: 1 addition & 1 deletion testdata/sqllogictests/functions/lance_scan.slt
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ select * from lance_scan('file://${PWD}/testdata/lance/table1') order by point.l

# Relative path
query IT
select * from lance_scan('../../testdata/lance/table1/') order by point.lat;
select * from lance_scan('./testdata/lance/table1/') order by point.lat;
----
0.2,1.8 {lat:42.1,long:-74.1}
1.1,1.2 {lat:45.5,long:-122.7}
Expand Down
6 changes: 3 additions & 3 deletions testdata/sqllogictests/functions/parquet_scan.slt
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ select count(*) from parquet_scan('file://${PWD}/testdata/parquet/userdata1.parq

# Relative path
query I
select count(*) from parquet_scan('../../testdata/parquet/userdata1.parquet')
select count(*) from parquet_scan('./testdata/parquet/userdata1.parquet')
----
1000

Expand Down Expand Up @@ -45,14 +45,14 @@ select * from parquet_scan('./testdata/parquet/userdata1.paruqet');
# Ambiguous name.
# query I
# select count(*)
# from parquet_scan('../../testdata/parquet/userdata1.parquet') p
# from parquet_scan('${PWD}/testdata/parquet/userdata1.parquet') p
# inner join (values ('Sweden')) as c(country) on p.country = c.country
# ----
# 1000

# query I
# select count(*)
# from parquet_scan('../../testdata/parquet/userdata1.parquet') p
# from parquet_scan('${PWD}/testdata/parquet/userdata1.parquet') p
# inner join (select 'Sweden') as c(country) on p.country = c.country
# ----
# 1000
6 changes: 3 additions & 3 deletions testdata/sqllogictests/functions/read_csv.slt
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ select count(*) from read_csv(

# Relative path
query I
select count(*) from read_csv('../../testdata/sqllogictests_datasources_common/data/bikeshare_stations.csv')
select count(*) from read_csv('./testdata/sqllogictests_datasources_common/data/bikeshare_stations.csv')
----
102

Expand Down Expand Up @@ -66,11 +66,11 @@ select * from read_csv(
# Alternative delimiters

query ITR
select * from read_csv('../../testdata/csv/delimiter.csv', delimiter => ';');
select * from read_csv('./testdata/csv/delimiter.csv', delimiter => ';');
----
1 hello, world 3.9
2 HELLO, WORLD 4.9

# Invalid delimiter (longer than one byte)
statement error delimiters for CSV must fit in one byte \(e.g. ','\)
select * from read_csv('../../testdata/csv/delimiter.csv', delimiter => ';;');
select * from read_csv('./testdata/csv/delimiter.csv', delimiter => ';;');
2 changes: 1 addition & 1 deletion testdata/sqllogictests/functions/read_json.slt
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ select count(*) from read_ndjson('file://${PWD}/testdata/sqllogictests_datasourc

# # Relative path
query I
select count(*) from read_ndjson('../../testdata/sqllogictests_datasources_common/data/bikeshare_stations.ndjson')
select count(*) from read_ndjson('./testdata/sqllogictests_datasources_common/data/bikeshare_stations.ndjson')
----
102

Expand Down
6 changes: 3 additions & 3 deletions testdata/sqllogictests/functions/read_parquet.slt
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ select count(*) from read_parquet('file://${PWD}/testdata/parquet/userdata1.parq

# Relative path
query I
select count(*) from read_parquet('../../testdata/parquet/userdata1.parquet')
select count(*) from read_parquet('./testdata/parquet/userdata1.parquet')
----
1000

Expand Down Expand Up @@ -45,14 +45,14 @@ select * from read_parquet('./testdata/parquet/userdata1.paruqet');
# Ambiguous name.
# query I
# select count(*)
# from read_parquet('../../testdata/parquet/userdata1.parquet') p
# from read_parquet('${PWD}/testdata/parquet/userdata1.parquet') p
# inner join (values ('Sweden')) as c(country) on p.country = c.country
# ----
# 1000

# query I
# select count(*)
# from read_parquet('../../testdata/parquet/userdata1.parquet') p
# from read_parquet('${PWD}/testdata/parquet/userdata1.parquet') p
# inner join (select 'Sweden') as c(country) on p.country = c.country
# ----
# 1000
6 changes: 3 additions & 3 deletions testdata/sqllogictests/glob.slt
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
query I
select count(*) from '../../testdata/parquet/userdata1.parquet';
select count(*) from '${PWD}/testdata/parquet/userdata1.parquet';
----
1000

query I
select count(*) from '../../testdata/parquet/*.parquet' as pd where pd.id != '';
select count(*) from '${PWD}/testdata/parquet/*.parquet' as pd where pd.id != '';
----
2000

query I
select count(*) from '../../testdata/parquet/*.parquet';
select count(*) from '${PWD}/testdata/parquet/*.parquet';
----
2000
Loading