From 07868a52b8faa8f222da7c59d3fba75a8a045800 Mon Sep 17 00:00:00 2001
From: Chaoyi Sun
Date: Wed, 2 Oct 2024 12:59:30 -0700
Subject: [PATCH] #363 Update semantic data to use java 17

---
 DRAFT_RELEASE_NOTES.md                         |  2 +-
 build-parent/pom.xml                           |  3 ++-
 .../extensions-encryption-vault-java/pom.xml   |  4 ++--
 .../tests/deployment_test.yaml                 |  4 ++--
 .../values.yaml                                |  5 ++--
 ...very-separate-data-records-java.pom.xml.vm  |  1 +
 .../spark.schema.base.java.vm                  | 23 ++++++++++---------
 .../data-delivery-spark/encryption.java.vm     |  9 --------
 8 files changed, 23 insertions(+), 28 deletions(-)

diff --git a/DRAFT_RELEASE_NOTES.md b/DRAFT_RELEASE_NOTES.md
index 80ec79f6c..ff4154cc7 100644
--- a/DRAFT_RELEASE_NOTES.md
+++ b/DRAFT_RELEASE_NOTES.md
@@ -3,7 +3,7 @@
 ## TBD

 # Breaking Changes
-Note: instructions for adapting to these changes are outlined in the upgrade instructions below.
+We have upgraded the baseline to Java 17, and the minimum Maven version required to build is now 3.9.6. You may encounter deprecated classes or incompatible modules; please update your dependencies accordingly.

 _There are no breaking changes in the 1.10 release._

diff --git a/build-parent/pom.xml b/build-parent/pom.xml
index 4219e8c00..575844a5c 100644
--- a/build-parent/pom.xml
+++ b/build-parent/pom.xml
@@ -66,6 +66,7 @@
         5.1.0
         4.0.3
         3.5.1
+        <version.jackson.mapper.asl>1.9.3</version.jackson.mapper.asl>
         4.0.1
@@ -121,7 +122,7 @@
         ${maven.compiler.source}
-        3.6.2
+        3.9.6
         ${maven.compiler.argument.source}
         18
         ERROR
diff --git a/extensions/extensions-encryption/extensions-encryption-vault-java/pom.xml b/extensions/extensions-encryption/extensions-encryption-vault-java/pom.xml
index 0d7cb115e..b1b0a9f6a 100644
--- a/extensions/extensions-encryption/extensions-encryption-vault-java/pom.xml
+++ b/extensions/extensions-encryption/extensions-encryption-vault-java/pom.xml
@@ -46,13 +46,13 @@
         <dependency>
             <groupId>info.cukes</groupId>
             <artifactId>cucumber-java</artifactId>
-            <version>1.2.5</version>
+            <version>1.2.6</version>
             <scope>test</scope>
         </dependency>
         <dependency>
             <groupId>info.cukes</groupId>
             <artifactId>cucumber-junit</artifactId>
-            <version>1.2.5</version>
+            <version>1.2.6</version>
             <scope>test</scope>
         </dependency>
diff --git a/extensions/extensions-helm/aissemble-spark-application-chart/tests/deployment_test.yaml b/extensions/extensions-helm/aissemble-spark-application-chart/tests/deployment_test.yaml
index e3d6db564..116a0c534 100644
--- a/extensions/extensions-helm/aissemble-spark-application-chart/tests/deployment_test.yaml
+++ b/extensions/extensions-helm/aissemble-spark-application-chart/tests/deployment_test.yaml
@@ -11,7 +11,7 @@ tests:
           value: "placeholder"
       - equal:
           path: spec.sparkVersion
-          value: 3.4.0
+          value: 3.5.2
       - equal:
           path: spec.dynamicAllocation.enabled
           value: true
@@ -44,7 +44,7 @@
           value: "512m"
       - equal:
           path: spec.executor.labels.version
-          value: 3.4.0
+          value: 3.5.2

   - it: Should set nodeSelector and gpu appropriately
     set:
diff --git a/extensions/extensions-helm/aissemble-spark-application-chart/values.yaml b/extensions/extensions-helm/aissemble-spark-application-chart/values.yaml
index 954bee43b..655050bea 100644
--- a/extensions/extensions-helm/aissemble-spark-application-chart/values.yaml
+++ b/extensions/extensions-helm/aissemble-spark-application-chart/values.yaml
@@ -10,7 +10,7 @@ sparkApp:
   imagePullPolicy: IfNotPresent
   restartPolicy:
     type: Never
-  sparkVersion: "3.4.0"
+  sparkVersion: "3.5.2"
   sparkConf:
     spark.hive.server2.thrift.port: "10000"
     spark.hive.server2.thrift.http.port: "10001"
@@ -21,6 +21,7 @@ sparkApp:
     spark.hive.metastore.uris: "thrift://hive-metastore-service:9083/default"
     spark.eventLog.dir: "/opt/spark/spark-events"
     spark.hive.metastore.warehouse.dir: "s3a://spark-infrastructure/warehouse"
+    spark.jars.ivy: "/opt/spark/.ivy2"
   dynamicAllocation:
     enabled: true
     initialExecutors: 0
@@ -48,7 +49,7 @@ sparkApp:
     coreLimit: "1200m"
     memory: "512m"
     labels:
-      version: 3.4.0
+      version: 3.5.2
     volumeMounts:
       - name: ivy-cache
         mountPath: "/opt/spark/.ivy2"
diff --git a/foundation/foundation-mda/src/main/resources/templates/data-delivery-data-records/data-delivery-separate-data-records-java.pom.xml.vm b/foundation/foundation-mda/src/main/resources/templates/data-delivery-data-records/data-delivery-separate-data-records-java.pom.xml.vm
index 33f47a0d6..5301017f5 100644
--- a/foundation/foundation-mda/src/main/resources/templates/data-delivery-data-records/data-delivery-separate-data-records-java.pom.xml.vm
+++ b/foundation/foundation-mda/src/main/resources/templates/data-delivery-data-records/data-delivery-separate-data-records-java.pom.xml.vm
@@ -30,6 +30,7 @@
         <dependency>
            <groupId>org.codehaus.jackson</groupId>
            <artifactId>jackson-mapper-asl</artifactId>
+           <version>${version.jackson.mapper.asl}</version>
         </dependency>
         <dependency>
            <groupId>com.boozallen.aissemble</groupId>
diff --git a/foundation/foundation-mda/src/main/resources/templates/data-delivery-data-records/spark.schema.base.java.vm b/foundation/foundation-mda/src/main/resources/templates/data-delivery-data-records/spark.schema.base.java.vm
index bd367535c..c7a904a91 100644
--- a/foundation/foundation-mda/src/main/resources/templates/data-delivery-data-records/spark.schema.base.java.vm
+++ b/foundation/foundation-mda/src/main/resources/templates/data-delivery-data-records/spark.schema.base.java.vm
@@ -13,7 +13,7 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import org.apache.spark.sql.Column;
-import scala.collection.JavaConversions;
+import scala.collection.JavaConverters;
 import scala.collection.Seq;
 import com.boozallen.aiops.data.delivery.spark.SparkSchema;
@@ -92,28 +92,28 @@ public abstract class ${record.capitalizedName}SchemaBase extends SparkSchema {
 #foreach ($field in $record.fields)
 #set ( $columnName = "#if($field.column)$field.column#{else}$field.upperSnakecaseName#end" )
 #if (${field.isRequired()})
-            .withColumn("${columnName}_IS_NOT_NULL", col("${columnName}").isNotNull())
+            .withColumn("${columnName}_IS_NOT_NULL", col(${columnName}_COLUMN).isNotNull())
 #end
 #if (${field.getValidation().getMinValue()})
-            .withColumn("${columnName}_GREATER_THAN_MIN", col("${columnName}").gt(lit(${field.getValidation().getMinValue()})).or(col("${columnName}").equalTo(lit(${field.getValidation().getMinValue()}))))
+            .withColumn("${columnName}_GREATER_THAN_MIN", col(${columnName}_COLUMN).gt(lit(${field.getValidation().getMinValue()})).or(col(${columnName}_COLUMN).equalTo(lit(${field.getValidation().getMinValue()}))))
 #end
 #if (${field.getValidation().getMaxValue()})
-            .withColumn("${columnName}_LESS_THAN_MAX", col("${columnName}").lt(lit(${field.getValidation().getMaxValue()})).or(col("${columnName}").equalTo(lit(${field.getValidation().getMaxValue()}))))
+            .withColumn("${columnName}_LESS_THAN_MAX", col(${columnName}_COLUMN).lt(lit(${field.getValidation().getMaxValue()})).or(col(${columnName}_COLUMN).equalTo(lit(${field.getValidation().getMaxValue()}))))
 #end
 #if (${field.getValidation().getScale()})
-            .withColumn("${columnName}_MATCHES_SCALE", col("${columnName}").rlike(("^[0-9]*(?:\\.[0-9]{0,${field.getValidation().getScale()}})?$")))
+            .withColumn("${columnName}_MATCHES_SCALE", col(${columnName}_COLUMN).rlike(("^[0-9]*(?:\\.[0-9]{0,${field.getValidation().getScale()}})?$")))
 #end
 #if (${field.getValidation().getMinLength()})
-            .withColumn("${columnName}_GREATER_THAN_MAX_LENGTH", col("${columnName}").rlike(("^.{${field.getValidation().getMinLength()},}")))
+            .withColumn("${columnName}_GREATER_THAN_MAX_LENGTH", col(${columnName}_COLUMN).rlike(("^.{${field.getValidation().getMinLength()},}")))
 #end
 #if (${field.getValidation().getMaxLength()})
-            .withColumn("${columnName}_LESS_THAN_MAX_LENGTH", col("${columnName}").rlike(("^.{${field.getValidation().getMaxLength()},}")).equalTo(lit(false)))
+            .withColumn("${columnName}_LESS_THAN_MAX_LENGTH", col(${columnName}_COLUMN).rlike(("^.{${field.getValidation().getMaxLength()},}")).equalTo(lit(false)))
 #end
 #foreach ($format in $field.getValidation().getFormats())
 #if ($foreach.first)
-            .withColumn("${columnName}_MATCHES_FORMAT", col("${columnName}").rlike(("$format.replace("\","\\")"))
+            .withColumn("${columnName}_MATCHES_FORMAT", col(${columnName}_COLUMN).rlike(("$format.replace("\","\\")"))
 #else
-                .or(col("${columnName}").rlike(("$format.replace("\","\\")")))
+                .or(col(${columnName}_COLUMN).rlike(("$format.replace("\","\\")")))
 #end
 #if ($foreach.last)
             )
@@ -122,7 +122,7 @@ public abstract class ${record.capitalizedName}SchemaBase extends SparkSchema {
 #end
         ;
         Column filterSchema = null;
-        List<String> validationColumns = new ArrayList();
+        List<String> validationColumns = new ArrayList<>();
         Collections.addAll(validationColumns, dataWithValidations.columns());
         validationColumns.removeAll(Arrays.asList(data.columns()));
         for (String columnName : validationColumns) {
@@ -139,7 +139,8 @@ public abstract class ${record.capitalizedName}SchemaBase extends SparkSchema {
         }

         // Remove validation columns from valid data
-        Seq<String> columnsToDrop = JavaConversions.asScalaBuffer(validationColumns).toSeq();
+        Seq<String> columnsToDrop =
+                JavaConverters.collectionAsScalaIterableConverter(validationColumns).asScala().toSeq();
         validData = validData.drop(columnsToDrop);

         return validData;
diff --git a/foundation/foundation-mda/src/main/resources/templates/data-delivery-spark/encryption.java.vm b/foundation/foundation-mda/src/main/resources/templates/data-delivery-spark/encryption.java.vm
index 84a3d18a6..df1a40bb9 100644
--- a/foundation/foundation-mda/src/main/resources/templates/data-delivery-spark/encryption.java.vm
+++ b/foundation/foundation-mda/src/main/resources/templates/data-delivery-spark/encryption.java.vm
@@ -99,15 +99,6 @@ ${step.encryptionSignature} {
                 datasetWithEncryptionPolicyApplied = ds;
 #end
 #end
-    #elseif ($step.hasMessagingInbound())
-        ## Input was a message
-        #if ($step.hasInboundRecordType())
-            ## Input was a message with custom record
-            Encoder<${step.inbound.recordType.name}> recordEncoder = Encoders.bean(${step.inbound.recordType.name}.class);
-            datasetWithEncryptionPolicyApplied = (${step.inbound.recordType.name}) ds.as(recordEncoder).collect();
-        #else
-            ## Input was a message with string
-        #end
 #end
     }
 }