Skip to content

Commit

Permalink
CodeGen from PR 21768 in Azure/azure-rest-api-specs
Browse files Browse the repository at this point in the history
Merge d5b6fb7072b128371ecc9c8fa42a883182445a39 into 903a8802a13b5f3c05cdceea9ba4716aaee5de45
  • Loading branch information
SDKAuto committed Dec 6, 2022
1 parent dc6b7d5 commit 54304be
Show file tree
Hide file tree
Showing 8 changed files with 397 additions and 8 deletions.
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
# Release History

## 1.0.0-beta.20 (Unreleased)
## 1.0.0-beta.20 (2022-12-06)

- Azure Resource Manager DataFactory client library for Java. This package contains Microsoft Azure SDK for DataFactory Management SDK. The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. Package tag package-2018-06. For documentation on how to use this package, please see [Azure Management Libraries for Java](https://aka.ms/azsdk/java/mgmt).

### Features Added

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ Various documentation is available to help you get started
<dependency>
<groupId>com.azure.resourcemanager</groupId>
<artifactId>azure-resourcemanager-datafactory</artifactId>
<version>1.0.0-beta.19</version>
<version>1.0.0-beta.20</version>
</dependency>
```
[//]: # ({x-version-update-end})
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -290,7 +290,7 @@ public DataFactoryManager authenticate(TokenCredential credential, AzureProfile
.append("-")
.append("com.azure.resourcemanager.datafactory")
.append("/")
.append("1.0.0-beta.19");
.append("1.0.0-beta.1");
if (!Configuration.getGlobalConfiguration().get("AZURE_TELEMETRY_DISABLED", false)) {
userAgentBuilder
.append(" (")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,13 @@
import com.azure.core.annotation.Fluent;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.datafactory.models.BigDataPoolParametrizationReference;
import com.azure.resourcemanager.datafactory.models.ConfigurationType;
import com.azure.resourcemanager.datafactory.models.SparkConfigurationParametrizationReference;
import com.azure.resourcemanager.datafactory.models.SynapseSparkJobReference;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
import java.util.Map;

/** Execute spark job activity properties. */
@Fluent
Expand All @@ -33,6 +37,14 @@ public final class SynapseSparkJobActivityTypeProperties {
@JsonProperty(value = "file")
private Object file;

/*
* Scanning subfolders from the root folder of the main definition file, these files will be added as reference
* files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and the folder names are case
* sensitive. Type: boolean (or Expression with resultType boolean).
*/
@JsonProperty(value = "scanFolder")
private Object scanFolder;

/*
* The fully-qualified identifier or the main class that is in the main definition file, which will override the
* 'className' of the spark job definition you provide. Type: string (or Expression with resultType string).
Expand Down Expand Up @@ -95,7 +107,26 @@ public final class SynapseSparkJobActivityTypeProperties {
* you provide.
*/
@JsonProperty(value = "numExecutors")
private Integer numExecutors;
private Object numExecutors;

/*
* The type of the spark config.
*/
@JsonProperty(value = "configurationType")
private ConfigurationType configurationType;

/*
* The spark configuration of the spark job.
*/
@JsonProperty(value = "targetSparkConfiguration")
private SparkConfigurationParametrizationReference targetSparkConfiguration;

/*
* Spark configuration property.
*/
@JsonProperty(value = "sparkConfig")
@JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS)
private Map<String, Object> sparkConfig;

/** Creates an instance of SynapseSparkJobActivityTypeProperties class. */
public SynapseSparkJobActivityTypeProperties() {
Expand Down Expand Up @@ -163,6 +194,30 @@ public SynapseSparkJobActivityTypeProperties withFile(Object file) {
return this;
}

/**
 * Get the scanFolder property: Scanning subfolders from the root folder of the main definition file, these files
 * will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and
 * the folder names are case sensitive. Type: boolean (or Expression with resultType boolean).
 *
 * @return the scanFolder value.
 */
public Object scanFolder() {
    return this.scanFolder;
}

/**
 * Set the scanFolder property: Scanning subfolders from the root folder of the main definition file, these files
 * will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and
 * the folder names are case sensitive. Type: boolean (or Expression with resultType boolean).
 *
 * @param scanFolder the scanFolder value to set.
 * @return the SynapseSparkJobActivityTypeProperties object itself.
 */
public SynapseSparkJobActivityTypeProperties withScanFolder(Object scanFolder) {
    this.scanFolder = scanFolder;
    return this;
}

/**
* Get the className property: The fully-qualified identifier or the main class that is in the main definition file,
* which will override the 'className' of the spark job definition you provide. Type: string (or Expression with
Expand Down Expand Up @@ -352,7 +407,7 @@ public SynapseSparkJobActivityTypeProperties withDriverSize(Object driverSize) {
*
* @return the numExecutors value.
*/
public Integer numExecutors() {
public Object numExecutors() {
    // Widened from Integer to Object in this version; presumably so an Expression can be
    // supplied in place of a literal count — TODO confirm against the service swagger.
    return numExecutors;
}

Expand All @@ -363,11 +418,72 @@ public Integer numExecutors() {
* @param numExecutors the numExecutors value to set.
* @return the SynapseSparkJobActivityTypeProperties object itself.
*/
public SynapseSparkJobActivityTypeProperties withNumExecutors(Integer numExecutors) {
public SynapseSparkJobActivityTypeProperties withNumExecutors(Object numExecutors) {
    // Parameter widened from Integer to Object in this version; presumably to allow an
    // Expression as well as a literal count — verify against the service swagger.
    this.numExecutors = numExecutors;
    return this;
}

/**
 * Gets the configurationType property: the type of the spark config.
 *
 * @return the configurationType value.
 */
public ConfigurationType configurationType() {
    return configurationType;
}

/**
 * Set the configurationType property: The type of the spark config. Known values are
 * {@code Default}, {@code Customized} and {@code Artifact} (see {@code ConfigurationType}).
 *
 * @param configurationType the configurationType value to set.
 * @return the SynapseSparkJobActivityTypeProperties object itself.
 */
public SynapseSparkJobActivityTypeProperties withConfigurationType(ConfigurationType configurationType) {
    this.configurationType = configurationType;
    return this;
}

/**
 * Gets the targetSparkConfiguration property: the spark configuration of the spark job.
 *
 * @return the targetSparkConfiguration value.
 */
public SparkConfigurationParametrizationReference targetSparkConfiguration() {
    return targetSparkConfiguration;
}

/**
 * Set the targetSparkConfiguration property: The spark configuration of the spark job.
 *
 * <p>Note: the reference is validated when {@code validate()} is called on this object; the
 * reference's {@code type} and {@code referenceName} properties are required.
 *
 * @param targetSparkConfiguration the targetSparkConfiguration value to set.
 * @return the SynapseSparkJobActivityTypeProperties object itself.
 */
public SynapseSparkJobActivityTypeProperties withTargetSparkConfiguration(
    SparkConfigurationParametrizationReference targetSparkConfiguration) {
    this.targetSparkConfiguration = targetSparkConfiguration;
    return this;
}

/**
 * Gets the sparkConfig property: Spark configuration property.
 *
 * @return the sparkConfig value.
 */
public Map<String, Object> sparkConfig() {
    return sparkConfig;
}

/**
 * Set the sparkConfig property: Spark configuration property.
 *
 * <p>Per the field's {@code @JsonInclude} settings, the map itself is omitted from the JSON
 * payload when null, but null values inside the map are serialized.
 *
 * @param sparkConfig the sparkConfig value to set.
 * @return the SynapseSparkJobActivityTypeProperties object itself.
 */
public SynapseSparkJobActivityTypeProperties withSparkConfig(Map<String, Object> sparkConfig) {
    this.sparkConfig = sparkConfig;
    return this;
}

/**
* Validates the instance.
*
Expand All @@ -385,6 +501,9 @@ public void validate() {
if (targetBigDataPool() != null) {
targetBigDataPool().validate();
}
if (targetSparkConfiguration() != null) {
targetSparkConfiguration().validate();
}
}

private static final ClientLogger LOGGER = new ClientLogger(SynapseSparkJobActivityTypeProperties.class);
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.resourcemanager.datafactory.models;

import com.azure.core.util.ExpandableStringEnum;
import com.fasterxml.jackson.annotation.JsonCreator;
import java.util.Collection;

/** The type of the spark config. */
public final class ConfigurationType extends ExpandableStringEnum<ConfigurationType> {
    /** Static value Default for ConfigurationType. */
    public static final ConfigurationType DEFAULT = fromString("Default");

    /** Static value Customized for ConfigurationType. */
    public static final ConfigurationType CUSTOMIZED = fromString("Customized");

    /** Static value Artifact for ConfigurationType. */
    public static final ConfigurationType ARTIFACT = fromString("Artifact");

    /**
     * Creates a new instance of ConfigurationType value.
     *
     * <p>Previously this constructor existed only implicitly; it is made explicit here so it can
     * be documented and deprecated. It is retained for JSON deserialization only.
     *
     * @deprecated Use the {@link #fromString(String)} factory method.
     */
    @Deprecated
    public ConfigurationType() {
    }

    /**
     * Creates or finds a ConfigurationType from its string representation.
     *
     * @param name a name to look for.
     * @return the corresponding ConfigurationType.
     */
    @JsonCreator
    public static ConfigurationType fromString(String name) {
        return fromString(name, ConfigurationType.class);
    }

    /**
     * Gets known ConfigurationType values.
     *
     * @return known ConfigurationType values.
     */
    public static Collection<ConfigurationType> values() {
        return values(ConfigurationType.class);
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,93 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.resourcemanager.datafactory.models;

import com.azure.core.annotation.Fluent;
import com.azure.core.util.logging.ClientLogger;
import com.fasterxml.jackson.annotation.JsonProperty;

/** Spark configuration reference. */
@Fluent
public final class SparkConfigurationParametrizationReference {
    /*
     * Spark configuration reference type.
     */
    @JsonProperty(value = "type", required = true)
    private SparkConfigurationReferenceType type;

    /*
     * Reference spark configuration name. Type: string (or Expression with resultType string).
     */
    @JsonProperty(value = "referenceName", required = true)
    private Object referenceName;

    /** Creates an instance of SparkConfigurationParametrizationReference class. */
    public SparkConfigurationParametrizationReference() {
    }

    /**
     * Gets the type property: Spark configuration reference type.
     *
     * @return the type value.
     */
    public SparkConfigurationReferenceType type() {
        return type;
    }

    /**
     * Sets the type property: Spark configuration reference type.
     *
     * @param type the type value to set.
     * @return the SparkConfigurationParametrizationReference object itself.
     */
    public SparkConfigurationParametrizationReference withType(SparkConfigurationReferenceType type) {
        this.type = type;
        return this;
    }

    /**
     * Gets the referenceName property: Reference spark configuration name. Type: string (or Expression with
     * resultType string).
     *
     * @return the referenceName value.
     */
    public Object referenceName() {
        return referenceName;
    }

    /**
     * Sets the referenceName property: Reference spark configuration name. Type: string (or Expression with
     * resultType string).
     *
     * @param referenceName the referenceName value to set.
     * @return the SparkConfigurationParametrizationReference object itself.
     */
    public SparkConfigurationParametrizationReference withReferenceName(Object referenceName) {
        this.referenceName = referenceName;
        return this;
    }

    /**
     * Validates the instance. Both {@code type} and {@code referenceName} are required by the service.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    public void validate() {
        if (type() == null) {
            throw LOGGER.logExceptionAsError(new IllegalArgumentException(
                "Missing required property type in model SparkConfigurationParametrizationReference"));
        }
        if (referenceName() == null) {
            throw LOGGER.logExceptionAsError(new IllegalArgumentException(
                "Missing required property referenceName in model SparkConfigurationParametrizationReference"));
        }
    }

    private static final ClientLogger LOGGER = new ClientLogger(SparkConfigurationParametrizationReference.class);
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.resourcemanager.datafactory.models;

import com.azure.core.util.ExpandableStringEnum;
import com.fasterxml.jackson.annotation.JsonCreator;
import java.util.Collection;

/** Spark configuration reference type. */
public final class SparkConfigurationReferenceType extends ExpandableStringEnum<SparkConfigurationReferenceType> {
    /** Static value SparkConfigurationReference for SparkConfigurationReferenceType. */
    public static final SparkConfigurationReferenceType SPARK_CONFIGURATION_REFERENCE =
        fromString("SparkConfigurationReference");

    /**
     * Creates a new instance of SparkConfigurationReferenceType value.
     *
     * <p>Previously this constructor existed only implicitly; it is made explicit here so it can
     * be documented and deprecated. It is retained for JSON deserialization only.
     *
     * @deprecated Use the {@link #fromString(String)} factory method.
     */
    @Deprecated
    public SparkConfigurationReferenceType() {
    }

    /**
     * Creates or finds a SparkConfigurationReferenceType from its string representation.
     *
     * @param name a name to look for.
     * @return the corresponding SparkConfigurationReferenceType.
     */
    @JsonCreator
    public static SparkConfigurationReferenceType fromString(String name) {
        return fromString(name, SparkConfigurationReferenceType.class);
    }

    /**
     * Gets known SparkConfigurationReferenceType values.
     *
     * @return known SparkConfigurationReferenceType values.
     */
    public static Collection<SparkConfigurationReferenceType> values() {
        return values(SparkConfigurationReferenceType.class);
    }
}
Loading

0 comments on commit 54304be

Please sign in to comment.