Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: options keyword is required before options block #2445

Merged
merged 18 commits into from
Feb 1, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/pr-cleanup.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ jobs:
run: |
gh extension install actions/gh-actions-cache

set -o pipefile
set -o pipefail
set -o errexit

gh cache list --repo="${{ github.repository }}" \
Expand Down
35 changes: 25 additions & 10 deletions crates/sqlexec/src/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -823,16 +823,32 @@ impl<'a> CustomParser<'a> {
self.parse_optional_ref("FORMAT")
}

/// Parse options for a datasource.
/// Parse options block.
fn parse_options(&mut self) -> Result<StmtOptions, ParserError> {
// Optional "OPTIONS" keyword
let _ = self.consume_token(&Token::make_keyword("OPTIONS"));
let has_options_keyword = self.consume_token(&Token::make_keyword("OPTIONS"));
let has_block_opening = self.parser.consume_token(&Token::LParen);

if !self.parser.consume_token(&Token::LParen) {
// If there's no "(" assume there's no options.
if !has_options_keyword && has_block_opening {
return self.expected("OPTIONS", Token::LParen);
};

if !has_options_keyword && !has_block_opening {
return Ok(StmtOptions::new(BTreeMap::new()));
};

if has_options_keyword && !has_block_opening {
return self.expected("OPTIONS block ( )", Token::EOF);
}

// reject options clause without options keyword, and options
// keyword with no parenthetical block.
//
// return empty option maps when no OPTIONS clause-and-block
// are provided
//
// CONSIDER: return error for `OPTIONS ( )` [no options
// specified], as this seems unlikely to be intentional.

let mut options = BTreeMap::new();
loop {
if self.parser.consume_token(&Token::RParen) {
Expand Down Expand Up @@ -1387,7 +1403,7 @@ mod tests {
options.clone(),
),
(
"(
"OPTIONS (
o1 = 'abc',
o2 = TRUE,
o3 = def,
Expand All @@ -1407,7 +1423,7 @@ mod tests {
options.clone(),
),
(
"(
"OPTIONS (
o1 'abc',
o2 TRUE,
o3 def,
Expand All @@ -1428,7 +1444,7 @@ mod tests {
options.clone(),
),
(
"(
"OPTIONS (
o1 = 'abc',
o2 = TRUE,
o3 = def,
Expand All @@ -1448,7 +1464,7 @@ mod tests {
options.clone(),
),
(
"(
"OPTIONS (
o1 'abc',
o2 TRUE,
o3 def,
Expand All @@ -1460,7 +1476,6 @@ mod tests {
// Empty
("", BTreeMap::new()),
("OPTIONS ( )", BTreeMap::new()),
("()", BTreeMap::new()),
];

for (sql, map) in test_cases {
Expand Down
3 changes: 1 addition & 2 deletions testdata/sqllogictests/allowed_operations.slt
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ READ_ONLY

statement ok
CREATE EXTERNAL TABLE debug_table
FROM debug (
FROM debug OPTIONS (
table_type 'never_ending'
);

Expand Down Expand Up @@ -92,4 +92,3 @@ FROM glare_catalog.databases
WHERE database_name = 'default';
----
READ_WRITE

8 changes: 7 additions & 1 deletion testdata/sqllogictests/external_table.slt
Original file line number Diff line number Diff line change
@@ -1,6 +1,12 @@
statement ok
create external table t1 from debug options (table_type = 'error_during_execution');

statement error
create external table t1 from debug (table_type = 'error_during_execution');

statement error
create external table t1 from debug options;

statement error Duplicate name
create external table t1 from debug options (table_type = 'never_ending');

Expand All @@ -14,7 +20,7 @@ statement ok
CREATE OR REPLACE EXTERNAL TABLE T1 FROM DEBUG OPTIONS (TABLE_TYPE = 'never_ending');

statement ok
CrEaTe ExTeRnAl TaBlE if not exists SuppLIER fRoM lOcAl (LoCaTiOn '${PWD}/testdata/parquet/userdata1.parquet');
CrEaTe ExTeRnAl TaBlE if not exists SuppLIER fRoM lOcAl oPtiOns (LoCaTiOn '${PWD}/testdata/parquet/userdata1.parquet');

statement ok
drop table supplier;
Expand Down
1 change: 0 additions & 1 deletion testdata/sqllogictests_iceberg/s3.slt
Original file line number Diff line number Diff line change
Expand Up @@ -102,4 +102,3 @@ RAIL 260
REG AIR 314
SHIP 316
TRUCK 264

4 changes: 2 additions & 2 deletions testdata/sqllogictests_object_store/azure/delta.slt
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
statement ok
CREATE CREDENTIALS azure_creds PROVIDER azure
( account_name '${AZURE_ACCOUNT}',
access_key = '${AZURE_ACCESS_KEY}' );
OPTIONS ( account_name '${AZURE_ACCOUNT}',
access_key = '${AZURE_ACCESS_KEY}' );

# Tests `delta_scan` with delta table in Azure.

Expand Down
9 changes: 4 additions & 5 deletions testdata/sqllogictests_object_store/azure/external_table.slt
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ copy ( values (1, 2) ) to 'azure://glaredb-test/ext-table.csv'
credentials azure_creds;

statement ok
create external table ext_table from azure (
create external table ext_table from azure options (
account_name = '${AZURE_ACCOUNT}',
access_key = '${AZURE_ACCESS_KEY}',
location 'azure://glaredb-test/ext-table.csv'
Expand All @@ -34,7 +34,7 @@ copy ( values (3, 4) ) to 'azure://glaredb-test/ext-table-1.csv'

statement ok
create external table ext_table_1 from azure
credentials azure_creds
credentials azure_creds options
(
location 'azure://glaredb-test/ext-table*'
);
Expand All @@ -57,14 +57,14 @@ copy ( values (7, 8) ) to 'azure://glaredb-test/pq-table-2'

statement error unable to resolve file type from the objects
create external table ext_table_2 from azure
credentials azure_creds
credentials azure_creds options
(
location 'azure://glaredb-test/pq-table*'
);

statement ok
create external table ext_table_2 from azure
credentials azure_creds
credentials azure_creds options
(
location 'azure://glaredb-test/pq-table*',
file_type parquet
Expand All @@ -75,4 +75,3 @@ select * from ext_table_2;
----
5 6
7 8

Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ SELECT b, a FROM csv_scan(
# Credentials
statement ok
CREATE CREDENTIALS gcp_creds PROVIDER gcp
( service_account_key '${GCP_SERVICE_ACCOUNT_KEY}' );
OPTIONS ( service_account_key '${GCP_SERVICE_ACCOUNT_KEY}' );

statement ok
COPY ( SELECT 5 AS a, 6 AS b )
Expand Down
4 changes: 2 additions & 2 deletions testdata/sqllogictests_object_store/gcs/delta.slt
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
statement ok
CREATE CREDENTIALS gcp_creds PROVIDER gcp
CREATE CREDENTIALS gcp_creds PROVIDER gcp OPTIONS
( service_account_key '${GCP_SERVICE_ACCOUNT_KEY}' );

# Tests `delta_scan` with delta table in gcs.
Expand Down Expand Up @@ -46,5 +46,5 @@ create external table delta_gcs_opts
from delta
options (
location 'gs://${GCS_BUCKET_NAME}/delta/table1',
service_account_key 'wrong_key'
service_account_key 'wrong_key'
);
10 changes: 5 additions & 5 deletions testdata/sqllogictests_object_store/gcs/external_table.slt
Original file line number Diff line number Diff line change
Expand Up @@ -2,15 +2,15 @@

# Credentials
statement ok
CREATE CREDENTIALS gcp_creds PROVIDER gcp
CREATE CREDENTIALS gcp_creds PROVIDER gcp OPTIONS
( service_account_key '${GCP_SERVICE_ACCOUNT_KEY}' );

statement ok
copy ( values (1, 2) ) to 'gs://${GCS_BUCKET_NAME}/ext-table.csv'
credentials gcp_creds;

statement ok
create external table ext_table from gcs (
create external table ext_table from gcs options (
service_account_key '${GCP_SERVICE_ACCOUNT_KEY}',
bucket '${GCS_BUCKET_NAME}',
location 'ext-table.csv'
Expand All @@ -31,7 +31,7 @@ copy ( values (3, 4) ) to 'gs://${GCS_BUCKET_NAME}/ext-table-1.csv'

statement ok
create external table ext_table_1 from gcs
credentials gcp_creds
credentials gcp_creds options
(
bucket '${GCS_BUCKET_NAME}',
location 'ext-table*'
Expand All @@ -55,15 +55,15 @@ copy ( values (7, 8) ) to 'gs://${GCS_BUCKET_NAME}/pq-table-2'

statement error unable to resolve file type from the objects
create external table ext_table_2 from gcs
credentials gcp_creds
credentials gcp_creds options
(
bucket '${GCS_BUCKET_NAME}',
location 'pq-table*'
);

statement ok
create external table ext_table_2 from gcs
credentials gcp_creds
credentials gcp_creds options
(
bucket '${GCS_BUCKET_NAME}',
location 'pq-table*',
Expand Down
2 changes: 1 addition & 1 deletion testdata/sqllogictests_object_store/gcs/iceberg.slt
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
statement ok
CREATE CREDENTIALS gcp_creds PROVIDER gcp
CREATE CREDENTIALS gcp_creds PROVIDER gcp OPTIONS
( service_account_key '${GCP_SERVICE_ACCOUNT_KEY}' );

# Tests external iceberg table in gcs with credentials object.
Expand Down
2 changes: 1 addition & 1 deletion testdata/sqllogictests_object_store/gcs/lance.slt
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
statement ok
CREATE CREDENTIALS gcp_creds PROVIDER gcp
CREATE CREDENTIALS gcp_creds PROVIDER gcp OPTIONS
( service_account_key '${GCP_SERVICE_ACCOUNT_KEY}' );

# Tests `lance_scan` with lance table in gcs.
Expand Down
14 changes: 7 additions & 7 deletions testdata/sqllogictests_object_store/local/external_table.slt
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ statement ok
copy ( values (1, 2) ) to '${TMP}/ext-table.csv';

statement ok
create external table ext_table from local (
create external table ext_table from local options (
location '${TMP}/ext-table.csv'
);

Expand All @@ -19,7 +19,7 @@ statement ok
copy ( values (3, 4) ) to '${TMP}/ext-table-1.csv';

statement ok
create external table ext_table_1 from local (
create external table ext_table_1 from local options (
location '${TMP}/ext-table*'
);

Expand All @@ -38,12 +38,12 @@ statement ok
copy ( values (7, 8) ) to '${TMP}/pq-table-2' format parquet;

statement error unable to resolve file type from the objects
create external table ext_table_2 from local (
create external table ext_table_2 from local options (
location '${TMP}/pq-table*'
);

statement ok
create external table ext_table_2 from local (
create external table ext_table_2 from local options (
location '${TMP}/pq-table*',
file_type parquet
);
Expand All @@ -57,7 +57,7 @@ select * from ext_table_2;
# Check compressed files

statement ok
create external table ext_table_3 from local (
create external table ext_table_3 from local options (
location '${PWD}/testdata/sqllogictests_datasources_common/data/bikeshare_stations.csv.gz',
file_type csv
);
Expand All @@ -71,7 +71,7 @@ select count(*) from ext_table_3;

# Giving wrong compression should error
statement ok
create external table ext_table_4 from local (
create external table ext_table_4 from local options (
location '${PWD}/testdata/sqllogictests_datasources_common/data/bikeshare_stations.csv.gz',
file_type csv,
compression xz
Expand All @@ -81,7 +81,7 @@ statement error stream/file format not recognized
select * from ext_table_4;

statement ok
create external table ext_table_5 from local (
create external table ext_table_5 from local options (
location '${PWD}/testdata/sqllogictests_datasources_common/data/bikeshare_stations.csv.gz',
file_type csv,
compression gz
Expand Down
8 changes: 4 additions & 4 deletions testdata/sqllogictests_object_store/s3/copy_to_and_scan.slt
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ statement ok
COPY ( SELECT 5 AS a, 6 AS b )
TO 's3://${AWS_S3_BUCKET_NAME}/copy_to/with_creds.csv'
CREDENTIALS aws_creds
( region '${AWS_S3_REGION}' );
OPTIONS ( region '${AWS_S3_REGION}' );

query II
SELECT a, b FROM csv_scan(
Expand Down Expand Up @@ -114,7 +114,7 @@ statement ok
COPY ( SELECT 7 AS a, 8 AS b )
TO 's3://${AWS_S3_BUCKET_NAME}/copy_to_with_creds.csv'
CREDENTIALS aws_creds
( region '${AWS_S3_REGION}' );
OPTIONS ( region '${AWS_S3_REGION}' );

query II rowsort
SELECT a, b FROM csv_scan(
Expand All @@ -137,13 +137,13 @@ statement ok
COPY ( VALUES (1, 2) )
TO 's3://${AWS_S3_BUCKET_NAME}/parquet-test/f1.parquet'
CREDENTIALS aws_creds
( region '${AWS_S3_REGION}' );
OPTIONS ( region '${AWS_S3_REGION}' );

statement ok
COPY ( VALUES (3, 4) )
TO 's3://${AWS_S3_BUCKET_NAME}/parquet-test/f2.parquet'
CREDENTIALS aws_creds
( region '${AWS_S3_REGION}' );
OPTIONS ( region '${AWS_S3_REGION}' );

query II rowsort
SELECT * FROM parquet_scan(
Expand Down
4 changes: 2 additions & 2 deletions testdata/sqllogictests_object_store/s3/delta.slt
Original file line number Diff line number Diff line change
Expand Up @@ -36,8 +36,8 @@ from delta
options (
location 's3://${AWS_S3_BUCKET_NAME}/delta/table1',
access_key_id = '${AWS_ACCESS_KEY_ID}',
secret_access_key = '${AWS_SECRET_ACCESS_KEY}',
region '${AWS_S3_REGION}'
secret_access_key = '${AWS_SECRET_ACCESS_KEY}',
region '${AWS_S3_REGION}'
);

query IT
Expand Down
Loading
Loading