#363 Update semantic data to use java 17
csun-cpointe committed Oct 2, 2024
1 parent 3bf3e3d commit 07868a5
Showing 8 changed files with 23 additions and 28 deletions.
2 changes: 1 addition & 1 deletion DRAFT_RELEASE_NOTES.md
@@ -3,7 +3,7 @@
## TBD

# Breaking Changes
Note: instructions for adapting to these changes are outlined in the upgrade instructions below.
We have upgraded the baseline to Java 17, and the minimum Maven version required to build is now 3.9.6. Some classes may now be deprecated and some modules may be incompatible, so please update your dependencies accordingly.

_There are no breaking changes in the 1.10 release._

3 changes: 2 additions & 1 deletion build-parent/pom.xml
@@ -66,6 +66,7 @@
<version.vault>5.1.0</version.vault>
<version.awaitility>4.0.3</version.awaitility>
<version.plexus.util>3.5.1</version.plexus.util>
<version.jackson.mapper.asl>1.9.3</version.jackson.mapper.asl>

<!-- Java EE Dependencies -->
<version.jakarta.cdi>4.0.1</version.jakarta.cdi>
@@ -121,7 +122,7 @@
<maven.compiler.argument.testSource>${maven.compiler.source}</maven.compiler.argument.testSource>

<!-- maven-enforcer-plugin -->
<version.maven.min>3.6.2</version.maven.min>
<version.maven.min>3.9.6</version.maven.min>
<version.jdk.required>${maven.compiler.argument.source}</version.jdk.required>
<version.jdk.next-unsupported>18</version.jdk.next-unsupported>
<insecure.repositories>ERROR</insecure.repositories>
@@ -46,13 +46,13 @@
<dependency>
<groupId>info.cukes</groupId>
<artifactId>cucumber-java</artifactId>
<version>1.2.5</version>
<version>1.2.6</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>info.cukes</groupId>
<artifactId>cucumber-junit</artifactId>
<version>1.2.5</version>
<version>1.2.6</version>
<scope>test</scope>
</dependency>
<dependency>
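The info.cukes coordinates above are the legacy, pre-io.cucumber Cucumber line, and 1.2.6 is effectively the final release on those coordinates. Purely for illustration (not part of this commit), a JUnit runner backed by these two test-scoped dependencies would look roughly like this, with the feature path and glue package as assumed placeholders:

```java
// Hypothetical JUnit 4 runner for the legacy info.cukes 1.2.x Cucumber line.
// The feature path and glue package below are placeholders, not from this repo.
import org.junit.runner.RunWith;

import cucumber.api.CucumberOptions;
import cucumber.api.junit.Cucumber;

@RunWith(Cucumber.class)
@CucumberOptions(
        features = "src/test/resources/specifications", // assumed feature directory
        glue = "com.example.steps"                      // assumed step-definition package
)
public class CucumberTestRunner {
}
```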
@@ -11,7 +11,7 @@ tests:
value: "placeholder"
- equal:
path: spec.sparkVersion
value: 3.4.0
value: 3.5.2
- equal:
path: spec.dynamicAllocation.enabled
value: true
@@ -44,7 +44,7 @@ tests:
value: "512m"
- equal:
path: spec.executor.labels.version
value: 3.4.0
value: 3.5.2

- it: Should set nodeSelector and gpu appropriately
set:
@@ -10,7 +10,7 @@ sparkApp:
imagePullPolicy: IfNotPresent
restartPolicy:
type: Never
sparkVersion: "3.4.0"
sparkVersion: "3.5.2"
sparkConf:
spark.hive.server2.thrift.port: "10000"
spark.hive.server2.thrift.http.port: "10001"
@@ -21,6 +21,7 @@ sparkApp:
spark.hive.metastore.uris: "thrift://hive-metastore-service:9083/default"
spark.eventLog.dir: "/opt/spark/spark-events"
spark.hive.metastore.warehouse.dir: "s3a://spark-infrastructure/warehouse"
spark.jars.ivy: "/opt/spark/.ivy2"
dynamicAllocation:
enabled: true
initialExecutors: 0
@@ -48,7 +49,7 @@ sparkApp:
coreLimit: "1200m"
memory: "512m"
labels:
version: 3.4.0
version: 3.5.2
volumeMounts:
- name: ivy-cache
mountPath: "/opt/spark/.ivy2"
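The new spark.jars.ivy entry points Ivy's cache at the ivy-cache volume mounted further down in these values. As a minimal sketch (not code from this commit), the equivalent configuration when constructing a Spark session directly would be:

```java
// Minimal sketch: the same conf applied programmatically. The path mirrors the
// chart's ivy-cache volume mount; the app name is a placeholder.
import org.apache.spark.sql.SparkSession;

public final class SparkSessionFactory {
    public static SparkSession create() {
        return SparkSession.builder()
                .appName("semantic-data-example")
                .config("spark.jars.ivy", "/opt/spark/.ivy2")
                .getOrCreate();
    }
}
```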
@@ -30,6 +30,7 @@
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
<version>${version.jackson.mapper.asl}</version>
</dependency>
<dependency>
<groupId>com.boozallen.aissemble</groupId>
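Pinning jackson-mapper-asl through the new `version.jackson.mapper.asl` property keeps this legacy org.codehaus Jackson 1.x artifact on an explicit, centrally managed version. For context, the artifact supplies the old Jackson 1.x ObjectMapper, along the lines of:

```java
// Identifies what the pinned artifact provides: the legacy Jackson 1.x mapper.
// Example usage only; not code from this commit.
import java.util.Map;

import org.codehaus.jackson.map.ObjectMapper;

public class LegacyJacksonExample {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        System.out.println(mapper.writeValueAsString(Map.of("java", 17)));
    }
}
```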
@@ -13,7 +13,7 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.spark.sql.Column;
import scala.collection.JavaConversions;
import scala.collection.JavaConverters;
import scala.collection.Seq;

import com.boozallen.aiops.data.delivery.spark.SparkSchema;
@@ -92,28 +92,28 @@ public abstract class ${record.capitalizedName}SchemaBase extends SparkSchema {
#foreach ($field in $record.fields)
#set ( $columnName = "#if($field.column)$field.column#{else}$field.upperSnakecaseName#end" )
#if (${field.isRequired()})
.withColumn("${columnName}_IS_NOT_NULL", col("${columnName}").isNotNull())
.withColumn("${columnName}_IS_NOT_NULL", col(${columnName}_COLUMN).isNotNull())
#end
#if (${field.getValidation().getMinValue()})
.withColumn("${columnName}_GREATER_THAN_MIN", col("${columnName}").gt(lit(${field.getValidation().getMinValue()})).or(col("${columnName}").equalTo(lit(${field.getValidation().getMinValue()}))))
.withColumn("${columnName}_GREATER_THAN_MIN", col(${columnName}_COLUMN).gt(lit(${field.getValidation().getMinValue()})).or(col(${columnName}_COLUMN).equalTo(lit(${field.getValidation().getMinValue()}))))
#end
#if (${field.getValidation().getMaxValue()})
.withColumn("${columnName}_LESS_THAN_MAX", col("${columnName}").lt(lit(${field.getValidation().getMaxValue()})).or(col("${columnName}").equalTo(lit(${field.getValidation().getMaxValue()}))))
.withColumn("${columnName}_LESS_THAN_MAX", col(${columnName}_COLUMN).lt(lit(${field.getValidation().getMaxValue()})).or(col(${columnName}_COLUMN).equalTo(lit(${field.getValidation().getMaxValue()}))))
#end
#if (${field.getValidation().getScale()})
.withColumn("${columnName}_MATCHES_SCALE", col("${columnName}").rlike(("^[0-9]*(?:\\.[0-9]{0,${field.getValidation().getScale()}})?$")))
.withColumn("${columnName}_MATCHES_SCALE", col(${columnName}_COLUMN).rlike(("^[0-9]*(?:\\.[0-9]{0,${field.getValidation().getScale()}})?$")))
#end
#if (${field.getValidation().getMinLength()})
.withColumn("${columnName}_GREATER_THAN_MAX_LENGTH", col("${columnName}").rlike(("^.{${field.getValidation().getMinLength()},}")))
.withColumn("${columnName}_GREATER_THAN_MAX_LENGTH", col(${columnName}_COLUMN).rlike(("^.{${field.getValidation().getMinLength()},}")))
#end
#if (${field.getValidation().getMaxLength()})
.withColumn("${columnName}_LESS_THAN_MAX_LENGTH", col("${columnName}").rlike(("^.{${field.getValidation().getMaxLength()},}")).equalTo(lit(false)))
.withColumn("${columnName}_LESS_THAN_MAX_LENGTH", col(${columnName}_COLUMN).rlike(("^.{${field.getValidation().getMaxLength()},}")).equalTo(lit(false)))
#end
#foreach ($format in $field.getValidation().getFormats())
#if ($foreach.first)
.withColumn("${columnName}_MATCHES_FORMAT", col("${columnName}").rlike(("$format.replace("\","\\")"))
.withColumn("${columnName}_MATCHES_FORMAT", col(${columnName}_COLUMN).rlike(("$format.replace("\","\\")"))
#else
.or(col("${columnName}").rlike(("$format.replace("\","\\")")))
.or(col(${columnName}_COLUMN).rlike(("$format.replace("\","\\")")))
#end
#if ($foreach.last)
)
@@ -122,7 +122,7 @@
#end ;

Column filterSchema = null;
List<String> validationColumns = new ArrayList();
List<String> validationColumns = new ArrayList<>();
Collections.addAll(validationColumns, dataWithValidations.columns());
validationColumns.removeAll(Arrays.asList(data.columns()));
for (String columnName : validationColumns) {
@@ -139,7 +139,8 @@ public abstract class ${record.capitalizedName}SchemaBase extends SparkSchema {
}

// Remove validation columns from valid data
Seq<String> columnsToDrop = JavaConversions.asScalaBuffer(validationColumns).toSeq();
Seq<String> columnsToDrop =
JavaConverters.collectionAsScalaIterableConverter(validationColumns).asScala().toSeq();
validData = validData.drop(columnsToDrop);

return validData;
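The template edits above replace hard-coded column-name strings with the generated `*_COLUMN` constants. For a hypothetical required field `CITY` with a max length of 50 (names assumed purely for illustration), the rendered validation code would come out roughly as:

```java
// Illustrative rendering of the Velocity template for an assumed field "CITY"
// (required, maxLength 50). CITY_COLUMN stands in for the generated constant.
import static org.apache.spark.sql.functions.col;
import static org.apache.spark.sql.functions.lit;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;

public class CityValidationExample {
    public static final String CITY_COLUMN = "CITY";

    public static Dataset<Row> withValidations(Dataset<Row> data) {
        return data
                .withColumn("CITY_IS_NOT_NULL", col(CITY_COLUMN).isNotNull())
                .withColumn("CITY_LESS_THAN_MAX_LENGTH",
                        col(CITY_COLUMN).rlike("^.{50,}").equalTo(lit(false)));
    }
}
```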
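The JavaConversions-to-JavaConverters switch matters for the Java 17 baseline: the implicit JavaConversions API was removed in Scala 2.13, while the explicit JavaConverters API remains available. A self-contained sketch of the conversion pattern adopted above, with placeholder column names:

```java
// Standalone sketch of the Java-to-Scala conversion used by the template;
// the column names are placeholders.
import java.util.Arrays;
import java.util.List;

import scala.collection.JavaConverters;
import scala.collection.Seq;

public class ConvertersExample {
    public static void main(String[] args) {
        List<String> validationColumns =
                Arrays.asList("CITY_IS_NOT_NULL", "CITY_LESS_THAN_MAX_LENGTH");
        // Java collection -> Scala Iterable -> Seq, replacing the removed
        // JavaConversions.asScalaBuffer(...).toSeq() call.
        Seq<String> columnsToDrop = JavaConverters
                .collectionAsScalaIterableConverter(validationColumns)
                .asScala()
                .toSeq();
        System.out.println(columnsToDrop.length());
    }
}
```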
@@ -99,15 +99,6 @@ ${step.encryptionSignature} {
datasetWithEncryptionPolicyApplied = ds;
#end
#end
#elseif ($step.hasMessagingInbound())
## Input was a message
#if ($step.hasInboundRecordType())
## Input was a message with custom record
Encoder<${step.inbound.recordType.name}> recordEncoder = Encoders.bean(${step.inbound.recordType.name}.class);
datasetWithEncryptionPolicyApplied = (${step.inbound.recordType.name}) ds.as(recordEncoder).collect();
#else
## Input was a message with string
#end
#end
}
}
