diff --git a/spark/v3.3/spark-extensions/src/main/scala/org/apache/spark/sql/catalyst/parser/extensions/IcebergSparkSqlExtensionsParser.scala b/spark/v3.3/spark-extensions/src/main/scala/org/apache/spark/sql/catalyst/parser/extensions/IcebergSparkSqlExtensionsParser.scala
index 7c17ea667e0b..b281cfedb7bc 100644
--- a/spark/v3.3/spark-extensions/src/main/scala/org/apache/spark/sql/catalyst/parser/extensions/IcebergSparkSqlExtensionsParser.scala
+++ b/spark/v3.3/spark-extensions/src/main/scala/org/apache/spark/sql/catalyst/parser/extensions/IcebergSparkSqlExtensionsParser.scala
@@ -29,6 +29,7 @@ import org.apache.iceberg.common.DynConstructors
 import org.apache.iceberg.spark.ExtendedParser
 import org.apache.iceberg.spark.ExtendedParser.RawOrderField
 import org.apache.iceberg.spark.Spark3Util
+import org.apache.iceberg.spark.procedures.SparkProcedures
 import org.apache.iceberg.spark.source.SparkTable
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.SparkSession
@@ -194,8 +195,10 @@ class IcebergSparkSqlExtensionsParser(delegate: ParserInterface) extends ParserI
       // Strip comments of the form /* ... */. This must come after stripping newlines so that
       // comments that span multiple lines are caught.
       .replaceAll("/\\*.*?\\*/", " ")
+      // Strip backticks so that `system`.`ancestors_of` matches system.ancestors_of
+      .replaceAll("`", "")
       .trim()
-    normalized.startsWith("call") || (
+    isIcebergProcedure(normalized) || (
       normalized.startsWith("alter table") && (
         normalized.contains("add partition field") ||
           normalized.contains("drop partition field") ||
@@ -209,6 +212,12 @@ class IcebergSparkSqlExtensionsParser(delegate: ParserInterface) extends ParserI
           isSnapshotRefDdl(normalized)))
   }
 
+  // All builtin Iceberg procedures are under the 'system' namespace
+  private def isIcebergProcedure(normalized: String): Boolean = {
+    normalized.startsWith("call") &&
+      SparkProcedures.names().asScala.map("system." + _).exists(normalized.contains)
+  }
+
   private def isSnapshotRefDdl(normalized: String): Boolean = {
     normalized.contains("create branch") ||
       normalized.contains("replace branch") ||
diff --git a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestCallStatementParser.java b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestCallStatementParser.java
index 42f29c28c149..0e5948028bb3 100644
--- a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestCallStatementParser.java
+++ b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestCallStatementParser.java
@@ -73,12 +73,37 @@ public static void stopSpark() {
     currentSpark.stop();
   }
 
+  @Test
+  public void testDelegateUnsupportedProcedure() {
+    assertThatThrownBy(() -> parser.parsePlan("CALL cat.d.t()"))
+        .isInstanceOf(ParseException.class)
+        .satisfies(
+            exception -> {
+              ParseException parseException = (ParseException) exception;
+              Assert.assertEquals("PARSE_SYNTAX_ERROR", parseException.getErrorClass());
+              Assert.assertEquals("Syntax error at or near 'CALL'", parseException.message());
+            });
+  }
+
+  @Test
+  public void testCallWithBackticks() throws ParseException {
+    CallStatement call =
+        (CallStatement) parser.parsePlan("CALL cat.`system`.`rollback_to_snapshot`()");
+    Assert.assertEquals(
+        ImmutableList.of("cat", "system", "rollback_to_snapshot"),
+        JavaConverters.seqAsJavaList(call.name()));
+    Assert.assertEquals(0, call.args().size());
+  }
+
   @Test
   public void testCallWithPositionalArgs() throws ParseException {
     CallStatement call =
-        (CallStatement) parser.parsePlan("CALL c.n.func(1, '2', 3L, true, 1.0D, 9.0e1, 900e-1BD)");
+        (CallStatement)
+            parser.parsePlan(
+                "CALL c.system.rollback_to_snapshot(1, '2', 3L, true, 1.0D, 9.0e1, 900e-1BD)");
     Assert.assertEquals(
-        ImmutableList.of("c", "n", "func"), JavaConverters.seqAsJavaList(call.name()));
+        ImmutableList.of("c", "system", "rollback_to_snapshot"),
+        JavaConverters.seqAsJavaList(call.name()));
 
     Assert.assertEquals(7, call.args().size());
 
@@ -94,9 +119,12 @@ public void testCallWithPositionalArgs() throws ParseException {
   @Test
   public void testCallWithNamedArgs() throws ParseException {
     CallStatement call =
-        (CallStatement) parser.parsePlan("CALL cat.system.func(c1 => 1, c2 => '2', c3 => true)");
+        (CallStatement)
+            parser.parsePlan(
+                "CALL cat.system.rollback_to_snapshot(c1 => 1, c2 => '2', c3 => true)");
     Assert.assertEquals(
-        ImmutableList.of("cat", "system", "func"), JavaConverters.seqAsJavaList(call.name()));
+        ImmutableList.of("cat", "system", "rollback_to_snapshot"),
+        JavaConverters.seqAsJavaList(call.name()));
 
     Assert.assertEquals(3, call.args().size());
 
@@ -107,9 +135,11 @@ public void testCallWithNamedArgs() throws ParseException {
 
   @Test
   public void testCallWithMixedArgs() throws ParseException {
-    CallStatement call = (CallStatement) parser.parsePlan("CALL cat.system.func(c1 => 1, '2')");
+    CallStatement call =
+        (CallStatement) parser.parsePlan("CALL cat.system.rollback_to_snapshot(c1 => 1, '2')");
     Assert.assertEquals(
-        ImmutableList.of("cat", "system", "func"), JavaConverters.seqAsJavaList(call.name()));
+        ImmutableList.of("cat", "system", "rollback_to_snapshot"),
+        JavaConverters.seqAsJavaList(call.name()));
 
     Assert.assertEquals(2, call.args().size());
 
@@ -121,9 +151,11 @@ public void testCallWithTimestampArg() throws ParseException {
     CallStatement call =
         (CallStatement)
-            parser.parsePlan("CALL cat.system.func(TIMESTAMP '2017-02-03T10:37:30.00Z')");
+            parser.parsePlan(
+                "CALL cat.system.rollback_to_snapshot(TIMESTAMP '2017-02-03T10:37:30.00Z')");
     Assert.assertEquals(
-        ImmutableList.of("cat", "system", "func"), JavaConverters.seqAsJavaList(call.name()));
+        ImmutableList.of("cat", "system", "rollback_to_snapshot"),
+        JavaConverters.seqAsJavaList(call.name()));
 
     Assert.assertEquals(1, call.args().size());
 
@@ -134,9 +166,11 @@ public void testCallWithTimestampArg() throws ParseException {
   @Test
   public void testCallWithVarSubstitution() throws ParseException {
     CallStatement call =
-        (CallStatement) parser.parsePlan("CALL cat.system.func('${spark.extra.prop}')");
+        (CallStatement)
+            parser.parsePlan("CALL cat.system.rollback_to_snapshot('${spark.extra.prop}')");
     Assert.assertEquals(
-        ImmutableList.of("cat", "system", "func"), JavaConverters.seqAsJavaList(call.name()));
+        ImmutableList.of("cat", "system", "rollback_to_snapshot"),
+        JavaConverters.seqAsJavaList(call.name()));
 
     Assert.assertEquals(1, call.args().size());
 
@@ -145,30 +179,32 @@
 
   @Test
   public void testCallParseError() {
-    assertThatThrownBy(() -> parser.parsePlan("CALL cat.system radish kebab"))
+    assertThatThrownBy(() -> parser.parsePlan("CALL cat.system.rollback_to_snapshot kebab"))
         .as("Should fail with a sensible parse error")
         .isInstanceOf(IcebergParseException.class)
-        .hasMessageContaining("missing '(' at 'radish'");
+        .hasMessageContaining("missing '(' at 'kebab'");
   }
 
   @Test
   public void testCallStripsComments() throws ParseException {
     List<String> callStatementsWithComments =
         Lists.newArrayList(
-            "/* bracketed comment */ CALL cat.system.func('${spark.extra.prop}')",
-            "/**/ CALL cat.system.func('${spark.extra.prop}')",
-            "-- single line comment \n CALL cat.system.func('${spark.extra.prop}')",
-            "-- multiple \n-- single line \n-- comments \n CALL cat.system.func('${spark.extra.prop}')",
-            "/* select * from multiline_comment \n where x like '%sql%'; */ CALL cat.system.func('${spark.extra.prop}')",
+            "/* bracketed comment */ CALL cat.system.rollback_to_snapshot('${spark.extra.prop}')",
+            "/**/ CALL cat.system.rollback_to_snapshot('${spark.extra.prop}')",
+            "-- single line comment \n CALL cat.system.rollback_to_snapshot('${spark.extra.prop}')",
+            "-- multiple \n-- single line \n-- comments \n CALL cat.system.rollback_to_snapshot('${spark.extra.prop}')",
+            "/* select * from multiline_comment \n where x like '%sql%'; */ CALL cat.system.rollback_to_snapshot('${spark.extra.prop}')",
             "/* {\"app\": \"dbt\", \"dbt_version\": \"1.0.1\", \"profile_name\": \"profile1\", \"target_name\": \"dev\", "
-                + "\"node_id\": \"model.profile1.stg_users\"} \n*/ CALL cat.system.func('${spark.extra.prop}')",
+                + "\"node_id\": \"model.profile1.stg_users\"} \n*/ CALL cat.system.rollback_to_snapshot('${spark.extra.prop}')",
             "/* Some multi-line comment \n"
-                + "*/ CALL /* inline comment */ cat.system.func('${spark.extra.prop}') -- ending comment",
-            "CALL -- a line ending comment\n" + "cat.system.func('${spark.extra.prop}')");
+                + "*/ CALL /* inline comment */ cat.system.rollback_to_snapshot('${spark.extra.prop}') -- ending comment",
+            "CALL -- a line ending comment\n"
+                + "cat.system.rollback_to_snapshot('${spark.extra.prop}')");
 
     for (String sqlText : callStatementsWithComments) {
       CallStatement call = (CallStatement) parser.parsePlan(sqlText);
       Assert.assertEquals(
-          ImmutableList.of("cat", "system", "func"), JavaConverters.seqAsJavaList(call.name()));
+          ImmutableList.of("cat", "system", "rollback_to_snapshot"),
+          JavaConverters.seqAsJavaList(call.name()));
 
       Assert.assertEquals(1, call.args().size());
diff --git a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestCherrypickSnapshotProcedure.java b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestCherrypickSnapshotProcedure.java
index b489cf76f1ef..8ac21fa0c7a3 100644
--- a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestCherrypickSnapshotProcedure.java
+++ b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestCherrypickSnapshotProcedure.java
@@ -31,8 +31,9 @@
 import org.apache.spark.sql.AnalysisException;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Row;
-import org.apache.spark.sql.catalyst.analysis.NoSuchProcedureException;
+import org.apache.spark.sql.catalyst.parser.ParseException;
 import org.junit.After;
+import org.junit.Assert;
 import org.junit.Test;
 
 public class TestCherrypickSnapshotProcedure extends SparkExtensionsTestBase {
@@ -178,8 +179,13 @@ public void testInvalidCherrypickSnapshotCases() {
 
     assertThatThrownBy(() -> sql("CALL %s.custom.cherrypick_snapshot('n', 't', 1L)", catalogName))
         .as("Should not resolve procedures in arbitrary namespaces")
-        .isInstanceOf(NoSuchProcedureException.class)
-        .hasMessageContaining("not found");
+        .isInstanceOf(ParseException.class)
+        .satisfies(
+            exception -> {
+              ParseException parseException = (ParseException) exception;
+              Assert.assertEquals("PARSE_SYNTAX_ERROR", parseException.getErrorClass());
+              Assert.assertEquals("Syntax error at or near 'CALL'", parseException.message());
+            });
 
     assertThatThrownBy(() -> sql("CALL %s.system.cherrypick_snapshot('t')", catalogName))
         .as("Should reject calls without all required args")
diff --git a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestExpireSnapshotsProcedure.java b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestExpireSnapshotsProcedure.java
index e9f42f5e0195..25dc7e47e971 100644
--- a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestExpireSnapshotsProcedure.java
+++ b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestExpireSnapshotsProcedure.java
@@ -52,7 +52,7 @@
 import org.apache.iceberg.spark.source.SimpleRecord;
 import org.apache.spark.sql.AnalysisException;
 import org.apache.spark.sql.Encoders;
-import org.apache.spark.sql.catalyst.analysis.NoSuchProcedureException;
+import org.apache.spark.sql.catalyst.parser.ParseException;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Test;
@@ -178,8 +178,12 @@ public void testInvalidExpireSnapshotsCases() {
 
     assertThatThrownBy(() -> sql("CALL %s.custom.expire_snapshots('n', 't')", catalogName))
         .as("Should not resolve procedures in arbitrary namespaces")
-        .isInstanceOf(NoSuchProcedureException.class)
-        .hasMessageContaining("not found");
+        .satisfies(
+            exception -> {
+              ParseException parseException = (ParseException) exception;
+              Assert.assertEquals("PARSE_SYNTAX_ERROR", parseException.getErrorClass());
+              Assert.assertEquals("Syntax error at or near 'CALL'", parseException.message());
+            });
 
     assertThatThrownBy(() -> sql("CALL %s.system.expire_snapshots()", catalogName))
         .as("Should reject calls without all required args")
diff --git a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestFastForwardBranchProcedure.java b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestFastForwardBranchProcedure.java
index 37d40957bc44..c960b0b98660 100644
--- a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestFastForwardBranchProcedure.java
+++ b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestFastForwardBranchProcedure.java
@@ -30,8 +30,9 @@
 import org.apache.iceberg.Table;
 import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
 import org.apache.spark.sql.AnalysisException;
-import org.apache.spark.sql.catalyst.analysis.NoSuchProcedureException;
+import org.apache.spark.sql.catalyst.parser.ParseException;
 import org.junit.After;
+import org.junit.Assert;
 import org.junit.Test;
 
 public class TestFastForwardBranchProcedure extends SparkExtensionsTestBase {
@@ -176,8 +177,13 @@ public void testInvalidFastForwardBranchCases() {
 
     assertThatThrownBy(
             () -> sql("CALL %s.custom.fast_forward('test_table', 'main', 'newBranch')", catalogName))
-        .isInstanceOf(NoSuchProcedureException.class)
-        .hasMessage("Procedure custom.fast_forward not found");
+        .isInstanceOf(ParseException.class)
+        .satisfies(
+            exception -> {
+              ParseException parseException = (ParseException) exception;
+              Assert.assertEquals("PARSE_SYNTAX_ERROR", parseException.getErrorClass());
+              Assert.assertEquals("Syntax error at or near 'CALL'", parseException.message());
+            });
 
     assertThatThrownBy(() -> sql("CALL %s.system.fast_forward('test_table', 'main')", catalogName))
         .isInstanceOf(AnalysisException.class)
diff --git a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestPublishChangesProcedure.java b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestPublishChangesProcedure.java
index e71ad687293b..5c65d7184f07 100644
--- a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestPublishChangesProcedure.java
+++ b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestPublishChangesProcedure.java
@@ -31,8 +31,9 @@
 import org.apache.spark.sql.AnalysisException;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Row;
-import org.apache.spark.sql.catalyst.analysis.NoSuchProcedureException;
+import org.apache.spark.sql.catalyst.parser.ParseException;
 import org.junit.After;
+import org.junit.Assert;
 import org.junit.Test;
 
 public class TestPublishChangesProcedure extends SparkExtensionsTestBase {
@@ -176,8 +177,12 @@ public void testInvalidApplyWapChangesCases() {
     assertThatThrownBy(
             () -> sql("CALL %s.custom.publish_changes('n', 't', 'not_valid')", catalogName))
         .as("Should not resolve procedures in arbitrary namespaces")
-        .isInstanceOf(NoSuchProcedureException.class)
-        .hasMessageContaining("not found");
+        .satisfies(
+            exception -> {
+              ParseException parseException = (ParseException) exception;
+              Assert.assertEquals("PARSE_SYNTAX_ERROR", parseException.getErrorClass());
+              Assert.assertEquals("Syntax error at or near 'CALL'", parseException.message());
+            });
 
     assertThatThrownBy(() -> sql("CALL %s.system.publish_changes('t')", catalogName))
         .as("Should reject calls without all required args")
diff --git a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRemoveOrphanFilesProcedure.java b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRemoveOrphanFilesProcedure.java
index 6e193b9a8642..e1966520893a 100644
--- a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRemoveOrphanFilesProcedure.java
+++ b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRemoveOrphanFilesProcedure.java
@@ -63,7 +63,6 @@
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.Row;
-import org.apache.spark.sql.catalyst.analysis.NoSuchProcedureException;
 import org.apache.spark.sql.catalyst.analysis.NoSuchTableException;
 import org.apache.spark.sql.catalyst.parser.ParseException;
 import org.junit.After;
@@ -266,8 +265,12 @@ public void testInvalidRemoveOrphanFilesCases() {
 
     assertThatThrownBy(() -> sql("CALL %s.custom.remove_orphan_files('n', 't')", catalogName))
         .as("Should not resolve procedures in arbitrary namespaces")
-        .isInstanceOf(NoSuchProcedureException.class)
-        .hasMessageContaining("not found");
+        .satisfies(
+            exception -> {
+              ParseException parseException = (ParseException) exception;
+              Assert.assertEquals("PARSE_SYNTAX_ERROR", parseException.getErrorClass());
+              Assert.assertEquals("Syntax error at or near 'CALL'", parseException.message());
+            });
 
     assertThatThrownBy(() -> sql("CALL %s.system.remove_orphan_files()", catalogName))
         .as("Should reject calls without all required args")
diff --git a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRewriteDataFilesProcedure.java b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRewriteDataFilesProcedure.java
index 1418d82cebf8..fd8754b30d5b 100644
--- a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRewriteDataFilesProcedure.java
+++ b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRewriteDataFilesProcedure.java
@@ -40,7 +40,7 @@
 import org.apache.spark.sql.AnalysisException;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Row;
-import org.apache.spark.sql.catalyst.analysis.NoSuchProcedureException;
+import org.apache.spark.sql.catalyst.parser.ParseException;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Assume;
@@ -566,8 +566,12 @@ public void testInvalidCasesForRewriteDataFiles() {
 
     assertThatThrownBy(() -> sql("CALL %s.custom.rewrite_data_files('n', 't')", catalogName))
         .as("Should not resolve procedures in arbitrary namespaces")
-        .isInstanceOf(NoSuchProcedureException.class)
-        .hasMessageContaining("not found");
+        .satisfies(
+            exception -> {
+              ParseException parseException = (ParseException) exception;
+              Assert.assertEquals("PARSE_SYNTAX_ERROR", parseException.getErrorClass());
+              Assert.assertEquals("Syntax error at or near 'CALL'", parseException.message());
+            });
 
     assertThatThrownBy(() -> sql("CALL %s.system.rewrite_data_files()", catalogName))
         .as("Should reject calls without all required args")
diff --git a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRewriteManifestsProcedure.java b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRewriteManifestsProcedure.java
index 591d88e83fca..dd4c55078a9d 100644
--- a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRewriteManifestsProcedure.java
+++ b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRewriteManifestsProcedure.java
@@ -30,8 +30,8 @@
 import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
 import org.apache.spark.sql.AnalysisException;
 import org.apache.spark.sql.RowFactory;
-import org.apache.spark.sql.catalyst.analysis.NoSuchProcedureException;
 import org.apache.spark.sql.catalyst.analysis.NoSuchTableException;
+import org.apache.spark.sql.catalyst.parser.ParseException;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Test;
@@ -284,8 +284,12 @@ public void testInvalidRewriteManifestsCases() {
 
     assertThatThrownBy(() -> sql("CALL %s.custom.rewrite_manifests('n', 't')", catalogName))
         .as("Should not resolve procedures in arbitrary namespaces")
-        .isInstanceOf(NoSuchProcedureException.class)
-        .hasMessageContaining("not found");
+        .satisfies(
+            exception -> {
+              ParseException parseException = (ParseException) exception;
+              Assert.assertEquals("PARSE_SYNTAX_ERROR", parseException.getErrorClass());
+              Assert.assertEquals("Syntax error at or near 'CALL'", parseException.message());
+            });
 
     assertThatThrownBy(() -> sql("CALL %s.system.rewrite_manifests()", catalogName))
         .as("Should reject calls without all required args")
diff --git a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRollbackToSnapshotProcedure.java b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRollbackToSnapshotProcedure.java
index b60a769f9f26..f8e20714017c 100644
--- a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRollbackToSnapshotProcedure.java
+++ b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRollbackToSnapshotProcedure.java
@@ -29,8 +29,9 @@
 import org.apache.spark.sql.AnalysisException;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Row;
-import org.apache.spark.sql.catalyst.analysis.NoSuchProcedureException;
+import org.apache.spark.sql.catalyst.parser.ParseException;
 import org.junit.After;
+import org.junit.Assert;
 import org.junit.Assume;
 import org.junit.Test;
 
@@ -261,8 +262,12 @@ public void testInvalidRollbackToSnapshotCases() {
 
     assertThatThrownBy(() -> sql("CALL %s.custom.rollback_to_snapshot('n', 't', 1L)", catalogName))
         .as("Should not resolve procedures in arbitrary namespaces")
-        .isInstanceOf(NoSuchProcedureException.class)
-        .hasMessageContaining("not found");
+        .satisfies(
+            exception -> {
+              ParseException parseException = (ParseException) exception;
+              Assert.assertEquals("PARSE_SYNTAX_ERROR", parseException.getErrorClass());
+              Assert.assertEquals("Syntax error at or near 'CALL'", parseException.message());
+            });
 
     assertThatThrownBy(() -> sql("CALL %s.system.rollback_to_snapshot('t')", catalogName))
         .as("Should reject calls without all required args")
diff --git a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRollbackToTimestampProcedure.java b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRollbackToTimestampProcedure.java
index c65be60cc1a7..48cf9648fab9 100644
--- a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRollbackToTimestampProcedure.java
+++ b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRollbackToTimestampProcedure.java
@@ -31,8 +31,9 @@
 import org.apache.spark.sql.AnalysisException;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Row;
-import org.apache.spark.sql.catalyst.analysis.NoSuchProcedureException;
+import org.apache.spark.sql.catalyst.parser.ParseException;
 import org.junit.After;
+import org.junit.Assert;
 import org.junit.Assume;
 import org.junit.Test;
 
@@ -303,8 +304,12 @@ public void testInvalidRollbackToTimestampCases() {
     assertThatThrownBy(
            () -> sql("CALL %s.custom.rollback_to_timestamp('n', 't', %s)", catalogName, timestamp))
         .as("Should not resolve procedures in arbitrary namespaces")
-        .isInstanceOf(NoSuchProcedureException.class)
-        .hasMessageContaining("not found");
+        .satisfies(
+            exception -> {
+              ParseException parseException = (ParseException) exception;
+              Assert.assertEquals("PARSE_SYNTAX_ERROR", parseException.getErrorClass());
+              Assert.assertEquals("Syntax error at or near 'CALL'", parseException.message());
+            });
 
     assertThatThrownBy(() -> sql("CALL %s.system.rollback_to_timestamp('t')", catalogName))
         .as("Should reject calls without all required args")
diff --git a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestSetCurrentSnapshotProcedure.java b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestSetCurrentSnapshotProcedure.java
index 55bda0df1b3c..a03a44f4f97d 100644
--- a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestSetCurrentSnapshotProcedure.java
+++ b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestSetCurrentSnapshotProcedure.java
@@ -30,8 +30,9 @@
 import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
 import org.apache.iceberg.relocated.com.google.common.collect.Iterables;
 import org.apache.spark.sql.AnalysisException;
-import org.apache.spark.sql.catalyst.analysis.NoSuchProcedureException;
+import org.apache.spark.sql.catalyst.parser.ParseException;
 import org.junit.After;
+import org.junit.Assert;
 import org.junit.Assume;
 import org.junit.Test;
 
@@ -213,8 +214,12 @@ public void testInvalidRollbackToSnapshotCases() {
 
     assertThatThrownBy(() -> sql("CALL %s.custom.set_current_snapshot('n', 't', 1L)", catalogName))
         .as("Should not resolve procedures in arbitrary namespaces")
-        .isInstanceOf(NoSuchProcedureException.class)
-        .hasMessageContaining("not found");
+        .satisfies(
+            exception -> {
+              ParseException parseException = (ParseException) exception;
+              Assert.assertEquals("PARSE_SYNTAX_ERROR", parseException.getErrorClass());
+              Assert.assertEquals("Syntax error at or near 'CALL'", parseException.message());
+            });
 
     assertThatThrownBy(() -> sql("CALL %s.system.set_current_snapshot('t')", catalogName))
         .as("Should reject calls without all required args")
diff --git a/spark/v3.3/spark/src/main/java/org/apache/iceberg/spark/procedures/SparkProcedures.java b/spark/v3.3/spark/src/main/java/org/apache/iceberg/spark/procedures/SparkProcedures.java
index b324cd4422b3..42003b24e94c 100644
--- a/spark/v3.3/spark/src/main/java/org/apache/iceberg/spark/procedures/SparkProcedures.java
+++ b/spark/v3.3/spark/src/main/java/org/apache/iceberg/spark/procedures/SparkProcedures.java
@@ -20,6 +20,7 @@
 import java.util.Locale;
 import java.util.Map;
+import java.util.Set;
 import java.util.function.Supplier;
 import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
 import org.apache.spark.sql.connector.catalog.TableCatalog;
@@ -37,6 +38,10 @@ public static ProcedureBuilder newBuilder(String name) {
     return builderSupplier != null ? builderSupplier.get() : null;
   }
 
+  public static Set<String> names() {
+    return BUILDERS.keySet();
+  }
+
   private static Map<String, Supplier<ProcedureBuilder>> initProcedureBuilders() {
     ImmutableMap.Builder<String, Supplier<ProcedureBuilder>> mapBuilder = ImmutableMap.builder();
     mapBuilder.put("rollback_to_snapshot", RollbackToSnapshotProcedure::builder);
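
Reviewer note, not part of the patch: the behavioural change above is that the extensions parser now claims a CALL statement only when it names a built-in procedure in the system namespace; any other CALL falls through to Spark's parser, which reports PARSE_SYNTAX_ERROR, as the updated tests assert. The standalone Scala sketch below mimics that check with a hard-coded subset of procedure names (the real parser reads them from SparkProcedures.names()), so the normalization and matching can be tried outside Spark; the object and value names here are illustrative only.

object IsIcebergProcedureSketch {
  // Hard-coded subset for illustration; the parser uses SparkProcedures.names().
  private val procedureNames = Set("rollback_to_snapshot", "expire_snapshots", "ancestors_of")

  // Mirrors the normalization in isIcebergCommand: lower-case, strip comments,
  // collapse whitespace, and strip backticks so `system`.`x` matches system.x.
  private def normalize(sqlText: String): String =
    sqlText.toLowerCase
      .replaceAll("--.*?\\n", " ")
      .replaceAll("\\s+", " ")
      .replaceAll("/\\*.*?\\*/", " ")
      .replaceAll("`", "")
      .trim

  private def isIcebergProcedure(normalized: String): Boolean =
    normalized.startsWith("call") &&
      procedureNames.map("system." + _).exists(normalized.contains)

  def main(args: Array[String]): Unit = {
    Seq(
      "CALL cat.system.rollback_to_snapshot('db.tbl', 1)",     // handled by the extensions parser
      "CALL cat.`system`.`rollback_to_snapshot`('db.tbl', 1)", // handled after backtick stripping
      "CALL cat.custom.some_procedure('db.tbl')"               // delegated to Spark's own parser
    ).foreach(sql => println(s"${isIcebergProcedure(normalize(sql))}  <-  $sql"))
  }
}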