diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/CHANGELOG.md b/sdk/datafactory/azure-resourcemanager-datafactory/CHANGELOG.md
index d041a6ee36035..c8643ab12d292 100644
--- a/sdk/datafactory/azure-resourcemanager-datafactory/CHANGELOG.md
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/CHANGELOG.md
@@ -1,6 +1,8 @@
# Release History
-## 1.0.0-beta.20 (Unreleased)
+## 1.0.0-beta.20 (2022-12-06)
+
+- Azure Resource Manager DataFactory client library for Java. This package contains Microsoft Azure SDK for DataFactory Management SDK. The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. Package tag package-2018-06. For documentation on how to use this package, please see [Azure Management Libraries for Java](https://aka.ms/azsdk/java/mgmt).
### Features Added
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/README.md b/sdk/datafactory/azure-resourcemanager-datafactory/README.md
index e391f94e2114c..fe35ac03e5a0f 100644
--- a/sdk/datafactory/azure-resourcemanager-datafactory/README.md
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/README.md
@@ -32,7 +32,7 @@ Various documentation is available to help you get started
<dependency>
    <groupId>com.azure.resourcemanager</groupId>
    <artifactId>azure-resourcemanager-datafactory</artifactId>
- <version>1.0.0-beta.19</version>
+ <version>1.0.0-beta.20</version>
</dependency>
```
[//]: # ({x-version-update-end})
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/DataFactoryManager.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/DataFactoryManager.java
index e3b72eacf358a..b0053ea4665c4 100644
--- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/DataFactoryManager.java
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/DataFactoryManager.java
@@ -290,7 +290,7 @@ public DataFactoryManager authenticate(TokenCredential credential, AzureProfile
.append("-")
.append("com.azure.resourcemanager.datafactory")
.append("/")
- .append("1.0.0-beta.19");
+ .append("1.0.0-beta.1");
if (!Configuration.getGlobalConfiguration().get("AZURE_TELEMETRY_DISABLED", false)) {
userAgentBuilder
.append(" (")
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseSparkJobActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseSparkJobActivityTypeProperties.java
index 1f837ccb649a8..b3f331d52e0b2 100644
--- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseSparkJobActivityTypeProperties.java
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseSparkJobActivityTypeProperties.java
@@ -7,9 +7,13 @@
import com.azure.core.annotation.Fluent;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.datafactory.models.BigDataPoolParametrizationReference;
+import com.azure.resourcemanager.datafactory.models.ConfigurationType;
+import com.azure.resourcemanager.datafactory.models.SparkConfigurationParametrizationReference;
import com.azure.resourcemanager.datafactory.models.SynapseSparkJobReference;
+import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
+import java.util.Map;
/** Execute spark job activity properties. */
@Fluent
@@ -33,6 +37,14 @@ public final class SynapseSparkJobActivityTypeProperties {
@JsonProperty(value = "file")
private Object file;
+ /*
+ * Scanning subfolders from the root folder of the main definition file, these files will be added as reference
+ * files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and the folder names are
+ * case-sensitive. Type: boolean (or Expression with resultType boolean).
+ */
+ @JsonProperty(value = "scanFolder")
+ private Object scanFolder;
+
/*
* The fully-qualified identifier or the main class that is in the main definition file, which will override the
* 'className' of the spark job definition you provide. Type: string (or Expression with resultType string).
@@ -95,7 +107,26 @@ public final class SynapseSparkJobActivityTypeProperties {
* you provide.
*/
@JsonProperty(value = "numExecutors")
- private Integer numExecutors;
+ private Object numExecutors;
+
+ /*
+ * The type of the spark config.
+ */
+ @JsonProperty(value = "configurationType")
+ private ConfigurationType configurationType;
+
+ /*
+ * The spark configuration of the spark job.
+ */
+ @JsonProperty(value = "targetSparkConfiguration")
+ private SparkConfigurationParametrizationReference targetSparkConfiguration;
+
+ /*
+ * Spark configuration property.
+ */
+ @JsonProperty(value = "sparkConfig")
+ @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS)
+ private Map<String, Object> sparkConfig;
/** Creates an instance of SynapseSparkJobActivityTypeProperties class. */
public SynapseSparkJobActivityTypeProperties() {
@@ -163,6 +194,30 @@ public SynapseSparkJobActivityTypeProperties withFile(Object file) {
return this;
}
+ /**
+ * Get the scanFolder property: Scanning subfolders from the root folder of the main definition file, these files
+ * will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and
+ * the folder names are case-sensitive. Type: boolean (or Expression with resultType boolean).
+ *
+ * @return the scanFolder value.
+ */
+ public Object scanFolder() {
+ return this.scanFolder;
+ }
+
+ /**
+ * Set the scanFolder property: Scanning subfolders from the root folder of the main definition file, these files
+ * will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and
+ * the folder names are case-sensitive. Type: boolean (or Expression with resultType boolean).
+ *
+ * @param scanFolder the scanFolder value to set.
+ * @return the SynapseSparkJobActivityTypeProperties object itself.
+ */
+ public SynapseSparkJobActivityTypeProperties withScanFolder(Object scanFolder) {
+ this.scanFolder = scanFolder;
+ return this;
+ }
+
/**
* Get the className property: The fully-qualified identifier or the main class that is in the main definition file,
* which will override the 'className' of the spark job definition you provide. Type: string (or Expression with
@@ -352,7 +407,7 @@ public SynapseSparkJobActivityTypeProperties withDriverSize(Object driverSize) {
*
* @return the numExecutors value.
*/
- public Integer numExecutors() {
+ public Object numExecutors() {
return this.numExecutors;
}
@@ -363,11 +418,72 @@ public Integer numExecutors() {
* @param numExecutors the numExecutors value to set.
* @return the SynapseSparkJobActivityTypeProperties object itself.
*/
- public SynapseSparkJobActivityTypeProperties withNumExecutors(Integer numExecutors) {
+ public SynapseSparkJobActivityTypeProperties withNumExecutors(Object numExecutors) {
this.numExecutors = numExecutors;
return this;
}
+ /**
+ * Get the configurationType property: The type of the spark config.
+ *
+ * @return the configurationType value.
+ */
+ public ConfigurationType configurationType() {
+ return this.configurationType;
+ }
+
+ /**
+ * Set the configurationType property: The type of the spark config.
+ *
+ * @param configurationType the configurationType value to set.
+ * @return the SynapseSparkJobActivityTypeProperties object itself.
+ */
+ public SynapseSparkJobActivityTypeProperties withConfigurationType(ConfigurationType configurationType) {
+ this.configurationType = configurationType;
+ return this;
+ }
+
+ /**
+ * Get the targetSparkConfiguration property: The spark configuration of the spark job.
+ *
+ * @return the targetSparkConfiguration value.
+ */
+ public SparkConfigurationParametrizationReference targetSparkConfiguration() {
+ return this.targetSparkConfiguration;
+ }
+
+ /**
+ * Set the targetSparkConfiguration property: The spark configuration of the spark job.
+ *
+ * @param targetSparkConfiguration the targetSparkConfiguration value to set.
+ * @return the SynapseSparkJobActivityTypeProperties object itself.
+ */
+ public SynapseSparkJobActivityTypeProperties withTargetSparkConfiguration(
+ SparkConfigurationParametrizationReference targetSparkConfiguration) {
+ this.targetSparkConfiguration = targetSparkConfiguration;
+ return this;
+ }
+
+ /**
+ * Get the sparkConfig property: Spark configuration property.
+ *
+ * @return the sparkConfig value.
+ */
+ public Map<String, Object> sparkConfig() {
+ return this.sparkConfig;
+ }
+
+ /**
+ * Set the sparkConfig property: Spark configuration property.
+ *
+ * @param sparkConfig the sparkConfig value to set.
+ * @return the SynapseSparkJobActivityTypeProperties object itself.
+ */
+ public SynapseSparkJobActivityTypeProperties withSparkConfig(Map<String, Object> sparkConfig) {
+ this.sparkConfig = sparkConfig;
+ return this;
+ }
+
/**
* Validates the instance.
*
@@ -385,6 +501,9 @@ public void validate() {
if (targetBigDataPool() != null) {
targetBigDataPool().validate();
}
+ if (targetSparkConfiguration() != null) {
+ targetSparkConfiguration().validate();
+ }
}
private static final ClientLogger LOGGER = new ClientLogger(SynapseSparkJobActivityTypeProperties.class);
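Reviewer note: the additions above are all plain fluent accessors on the inner properties model. A minimal usage sketch of the new surface (the values and the "mySparkConfig" name are illustrative; Collections.singletonMap keeps it Java-8 friendly):

```java
import com.azure.resourcemanager.datafactory.fluent.models.SynapseSparkJobActivityTypeProperties;
import com.azure.resourcemanager.datafactory.models.ConfigurationType;
import com.azure.resourcemanager.datafactory.models.SparkConfigurationParametrizationReference;
import com.azure.resourcemanager.datafactory.models.SparkConfigurationReferenceType;
import java.util.Collections;

public final class SparkJobPropsSketch {
    public static void main(String[] args) {
        SynapseSparkJobActivityTypeProperties props = new SynapseSparkJobActivityTypeProperties()
            // scanFolder is Object-typed: a Boolean literal or an ADF expression both serialize.
            .withScanFolder(true)
            // numExecutors widened from Integer to Object, so expressions are now accepted too.
            .withNumExecutors(4)
            .withConfigurationType(ConfigurationType.ARTIFACT)
            .withTargetSparkConfiguration(
                new SparkConfigurationParametrizationReference()
                    .withType(SparkConfigurationReferenceType.SPARK_CONFIGURATION_REFERENCE)
                    .withReferenceName("mySparkConfig"))
            .withSparkConfig(Collections.<String, Object>singletonMap(
                "spark.dynamicAllocation.enabled", "true"));
        // validate() (see the hunk above) now also recurses into targetSparkConfiguration.
        System.out.println(props.numExecutors());
    }
}
```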
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConfigurationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConfigurationType.java
new file mode 100644
index 0000000000000..22f33fe475b15
--- /dev/null
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConfigurationType.java
@@ -0,0 +1,41 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+// Code generated by Microsoft (R) AutoRest Code Generator.
+
+package com.azure.resourcemanager.datafactory.models;
+
+import com.azure.core.util.ExpandableStringEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import java.util.Collection;
+
+/** The type of the spark config. */
+public final class ConfigurationType extends ExpandableStringEnum<ConfigurationType> {
+ /** Static value Default for ConfigurationType. */
+ public static final ConfigurationType DEFAULT = fromString("Default");
+
+ /** Static value Customized for ConfigurationType. */
+ public static final ConfigurationType CUSTOMIZED = fromString("Customized");
+
+ /** Static value Artifact for ConfigurationType. */
+ public static final ConfigurationType ARTIFACT = fromString("Artifact");
+
+ /**
+ * Creates or finds a ConfigurationType from its string representation.
+ *
+ * @param name a name to look for.
+ * @return the corresponding ConfigurationType.
+ */
+ @JsonCreator
+ public static ConfigurationType fromString(String name) {
+ return fromString(name, ConfigurationType.class);
+ }
+
+ /**
+ * Gets known ConfigurationType values.
+ *
+ * @return known ConfigurationType values.
+ */
+ public static Collection<ConfigurationType> values() {
+ return values(ConfigurationType.class);
+ }
+}
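Reviewer note: like the other generated enums in this package, ConfigurationType extends azure-core's ExpandableStringEnum rather than being a Java enum, so values the service introduces later deserialize without breaking clients. A quick illustration of those semantics (standard azure-core behavior, not part of this diff):

```java
import com.azure.resourcemanager.datafactory.models.ConfigurationType;

public final class ConfigurationTypeSketch {
    public static void main(String[] args) {
        // Lookup is string-based; known names map onto the declared constants.
        ConfigurationType artifact = ConfigurationType.fromString("Artifact");
        System.out.println(artifact.equals(ConfigurationType.ARTIFACT)); // true

        // Unlike a regular enum, an unrecognized value does not throw; it round-trips as-is.
        ConfigurationType future = ConfigurationType.fromString("SomeFutureType");
        System.out.println(future); // SomeFutureType
    }
}
```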
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkConfigurationParametrizationReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkConfigurationParametrizationReference.java
new file mode 100644
index 0000000000000..9b1aba3efdcc3
--- /dev/null
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkConfigurationParametrizationReference.java
@@ -0,0 +1,93 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+// Code generated by Microsoft (R) AutoRest Code Generator.
+
+package com.azure.resourcemanager.datafactory.models;
+
+import com.azure.core.annotation.Fluent;
+import com.azure.core.util.logging.ClientLogger;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+/** Spark configuration reference. */
+@Fluent
+public final class SparkConfigurationParametrizationReference {
+ /*
+ * Spark configuration reference type.
+ */
+ @JsonProperty(value = "type", required = true)
+ private SparkConfigurationReferenceType type;
+
+ /*
+ * Reference spark configuration name. Type: string (or Expression with resultType string).
+ */
+ @JsonProperty(value = "referenceName", required = true)
+ private Object referenceName;
+
+ /** Creates an instance of SparkConfigurationParametrizationReference class. */
+ public SparkConfigurationParametrizationReference() {
+ }
+
+ /**
+ * Get the type property: Spark configuration reference type.
+ *
+ * @return the type value.
+ */
+ public SparkConfigurationReferenceType type() {
+ return this.type;
+ }
+
+ /**
+ * Set the type property: Spark configuration reference type.
+ *
+ * @param type the type value to set.
+ * @return the SparkConfigurationParametrizationReference object itself.
+ */
+ public SparkConfigurationParametrizationReference withType(SparkConfigurationReferenceType type) {
+ this.type = type;
+ return this;
+ }
+
+ /**
+ * Get the referenceName property: Reference spark configuration name. Type: string (or Expression with resultType
+ * string).
+ *
+ * @return the referenceName value.
+ */
+ public Object referenceName() {
+ return this.referenceName;
+ }
+
+ /**
+ * Set the referenceName property: Reference spark configuration name. Type: string (or Expression with resultType
+ * string).
+ *
+ * @param referenceName the referenceName value to set.
+ * @return the SparkConfigurationParametrizationReference object itself.
+ */
+ public SparkConfigurationParametrizationReference withReferenceName(Object referenceName) {
+ this.referenceName = referenceName;
+ return this;
+ }
+
+ /**
+ * Validates the instance.
+ *
+ * @throws IllegalArgumentException thrown if the instance is not valid.
+ */
+ public void validate() {
+ if (type() == null) {
+ throw LOGGER
+ .logExceptionAsError(
+ new IllegalArgumentException(
+ "Missing required property type in model SparkConfigurationParametrizationReference"));
+ }
+ if (referenceName() == null) {
+ throw LOGGER
+ .logExceptionAsError(
+ new IllegalArgumentException(
+ "Missing required property referenceName in model SparkConfigurationParametrizationReference"));
+ }
+ }
+
+ private static final ClientLogger LOGGER = new ClientLogger(SparkConfigurationParametrizationReference.class);
+}
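Reviewer note: both properties are declared required, and validate() enforces that before serialization. A minimal sketch of the contract ("mySparkConfig" is an illustrative name):

```java
import com.azure.resourcemanager.datafactory.models.SparkConfigurationParametrizationReference;
import com.azure.resourcemanager.datafactory.models.SparkConfigurationReferenceType;

public final class SparkConfigRefSketch {
    public static void main(String[] args) {
        SparkConfigurationParametrizationReference ref = new SparkConfigurationParametrizationReference()
            .withType(SparkConfigurationReferenceType.SPARK_CONFIGURATION_REFERENCE)
            // referenceName is Object-typed: a plain String or an ADF expression both work.
            .withReferenceName("mySparkConfig");
        ref.validate(); // passes: both required properties are set

        SparkConfigurationParametrizationReference incomplete =
            new SparkConfigurationParametrizationReference().withReferenceName("mySparkConfig");
        try {
            incomplete.validate();
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // "Missing required property type in model ..."
        }
    }
}
```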
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkConfigurationReferenceType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkConfigurationReferenceType.java
new file mode 100644
index 0000000000000..796c915c01f78
--- /dev/null
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkConfigurationReferenceType.java
@@ -0,0 +1,36 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+// Code generated by Microsoft (R) AutoRest Code Generator.
+
+package com.azure.resourcemanager.datafactory.models;
+
+import com.azure.core.util.ExpandableStringEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import java.util.Collection;
+
+/** Spark configuration reference type. */
+public final class SparkConfigurationReferenceType extends ExpandableStringEnum<SparkConfigurationReferenceType> {
+ /** Static value SparkConfigurationReference for SparkConfigurationReferenceType. */
+ public static final SparkConfigurationReferenceType SPARK_CONFIGURATION_REFERENCE =
+ fromString("SparkConfigurationReference");
+
+ /**
+ * Creates or finds a SparkConfigurationReferenceType from its string representation.
+ *
+ * @param name a name to look for.
+ * @return the corresponding SparkConfigurationReferenceType.
+ */
+ @JsonCreator
+ public static SparkConfigurationReferenceType fromString(String name) {
+ return fromString(name, SparkConfigurationReferenceType.class);
+ }
+
+ /**
+ * Gets known SparkConfigurationReferenceType values.
+ *
+ * @return known SparkConfigurationReferenceType values.
+ */
+ public static Collection<SparkConfigurationReferenceType> values() {
+ return values(SparkConfigurationReferenceType.class);
+ }
+}
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobDefinitionActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobDefinitionActivity.java
index bbd58a73e98dd..2a4976084eed7 100644
--- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobDefinitionActivity.java
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobDefinitionActivity.java
@@ -11,6 +11,7 @@
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import java.util.List;
+import java.util.Map;
/** Execute spark job activity. */
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type")
@@ -149,6 +150,33 @@ public SynapseSparkJobDefinitionActivity withFile(Object file) {
return this;
}
+ /**
+ * Get the scanFolder property: Scanning subfolders from the root folder of the main definition file, these files
+ * will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and
+ * the folder names are case-sensitive. Type: boolean (or Expression with resultType boolean).
+ *
+ * @return the scanFolder value.
+ */
+ public Object scanFolder() {
+ return this.innerTypeProperties() == null ? null : this.innerTypeProperties().scanFolder();
+ }
+
+ /**
+ * Set the scanFolder property: Scanning subfolders from the root folder of the main definition file, these files
+ * will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and
+ * the folder names are case-sensitive. Type: boolean (or Expression with resultType boolean).
+ *
+ * @param scanFolder the scanFolder value to set.
+ * @return the SynapseSparkJobDefinitionActivity object itself.
+ */
+ public SynapseSparkJobDefinitionActivity withScanFolder(Object scanFolder) {
+ if (this.innerTypeProperties() == null) {
+ this.innerTypeProperties = new SynapseSparkJobActivityTypeProperties();
+ }
+ this.innerTypeProperties().withScanFolder(scanFolder);
+ return this;
+ }
+
/**
* Get the className property: The fully-qualified identifier or the main class that is in the main definition file,
* which will override the 'className' of the spark job definition you provide. Type: string (or Expression with
@@ -362,7 +390,7 @@ public SynapseSparkJobDefinitionActivity withDriverSize(Object driverSize) {
*
* @return the numExecutors value.
*/
- public Integer numExecutors() {
+ public Object numExecutors() {
return this.innerTypeProperties() == null ? null : this.innerTypeProperties().numExecutors();
}
@@ -373,7 +401,7 @@ public Integer numExecutors() {
* @param numExecutors the numExecutors value to set.
* @return the SynapseSparkJobDefinitionActivity object itself.
*/
- public SynapseSparkJobDefinitionActivity withNumExecutors(Integer numExecutors) {
+ public SynapseSparkJobDefinitionActivity withNumExecutors(Object numExecutors) {
if (this.innerTypeProperties() == null) {
this.innerTypeProperties = new SynapseSparkJobActivityTypeProperties();
}
@@ -381,6 +409,76 @@ public SynapseSparkJobDefinitionActivity withNumExecutors(Integer numExecutors)
return this;
}
+ /**
+ * Get the configurationType property: The type of the spark config.
+ *
+ * @return the configurationType value.
+ */
+ public ConfigurationType configurationType() {
+ return this.innerTypeProperties() == null ? null : this.innerTypeProperties().configurationType();
+ }
+
+ /**
+ * Set the configurationType property: The type of the spark config.
+ *
+ * @param configurationType the configurationType value to set.
+ * @return the SynapseSparkJobDefinitionActivity object itself.
+ */
+ public SynapseSparkJobDefinitionActivity withConfigurationType(ConfigurationType configurationType) {
+ if (this.innerTypeProperties() == null) {
+ this.innerTypeProperties = new SynapseSparkJobActivityTypeProperties();
+ }
+ this.innerTypeProperties().withConfigurationType(configurationType);
+ return this;
+ }
+
+ /**
+ * Get the targetSparkConfiguration property: The spark configuration of the spark job.
+ *
+ * @return the targetSparkConfiguration value.
+ */
+ public SparkConfigurationParametrizationReference targetSparkConfiguration() {
+ return this.innerTypeProperties() == null ? null : this.innerTypeProperties().targetSparkConfiguration();
+ }
+
+ /**
+ * Set the targetSparkConfiguration property: The spark configuration of the spark job.
+ *
+ * @param targetSparkConfiguration the targetSparkConfiguration value to set.
+ * @return the SynapseSparkJobDefinitionActivity object itself.
+ */
+ public SynapseSparkJobDefinitionActivity withTargetSparkConfiguration(
+ SparkConfigurationParametrizationReference targetSparkConfiguration) {
+ if (this.innerTypeProperties() == null) {
+ this.innerTypeProperties = new SynapseSparkJobActivityTypeProperties();
+ }
+ this.innerTypeProperties().withTargetSparkConfiguration(targetSparkConfiguration);
+ return this;
+ }
+
+ /**
+ * Get the sparkConfig property: Spark configuration property.
+ *
+ * @return the sparkConfig value.
+ */
+ public Map<String, Object> sparkConfig() {
+ return this.innerTypeProperties() == null ? null : this.innerTypeProperties().sparkConfig();
+ }
+
+ /**
+ * Set the sparkConfig property: Spark configuration property.
+ *
+ * @param sparkConfig the sparkConfig value to set.
+ * @return the SynapseSparkJobDefinitionActivity object itself.
+ */
+ public SynapseSparkJobDefinitionActivity withSparkConfig(Map<String, Object> sparkConfig) {
+ if (this.innerTypeProperties() == null) {
+ this.innerTypeProperties = new SynapseSparkJobActivityTypeProperties();
+ }
+ this.innerTypeProperties().withSparkConfig(sparkConfig);
+ return this;
+ }
+
/**
* Validates the instance.
*