Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[ReleasePR sdk/synapse/mgmt-v2019_06_01_preview] attributes added in LibraryInfo in BigDataPool specs #17223

Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions sdk/synapse/mgmt-v2019_06_01_preview/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,11 @@
<parent>
<groupId>com.microsoft.azure</groupId>
<artifactId>azure-arm-parent</artifactId>
<version>1.3.2</version>
<relativePath>../../parents/azure-arm-parent/pom.xml</relativePath>
<version>1.1.0</version>
<relativePath>../../../pom.management.xml</relativePath>
</parent>
<artifactId>azure-mgmt-synapse</artifactId>
<version>1.0.0-beta-3</version>
<version>1.0.0-beta</version>
<packaging>jar</packaging>
<name>Microsoft Azure SDK for Synapse Management</name>
<description>This package contains Microsoft Synapse Management SDK.</description>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,8 @@
import com.microsoft.azure.ProxyResource;

/**
* The resource model definition for a Azure Resource Manager resource with an
* Entity Resource.
* The resource model definition for an Azure Resource Manager resource with an
* etag.
*/
public class AzureEntityResource extends ProxyResource {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/

package com.microsoft.azure.management.synapse.v2019_06_01_preview;

import com.fasterxml.jackson.annotation.JsonProperty;

/**
* Babylon Configuration.
*/
/**
 * Babylon Configuration.
 * <p>
 * Simple settings holder carrying the resource ID of the Babylon instance
 * linked to a workspace. Serialized by Jackson via the annotated field.
 */
public class BabylonConfiguration {
    /**
     * Babylon Resource ID.
     */
    @JsonProperty(value = "babylonResourceId")
    private String babylonResourceId;

    /**
     * Get the Babylon resource ID associated with this configuration.
     *
     * @return the babylonResourceId value
     */
    public String babylonResourceId() {
        return babylonResourceId;
    }

    /**
     * Set the Babylon resource ID, fluent-style.
     *
     * @param babylonResourceId the babylonResourceId value to set
     * @return the BabylonConfiguration object itself.
     */
    public BabylonConfiguration withBabylonResourceId(String babylonResourceId) {
        this.babylonResourceId = babylonResourceId;
        return this;
    }

}
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
import com.microsoft.azure.management.synapse.v2019_06_01_preview.implementation.SynapseManager;
import java.util.Map;
import org.joda.time.DateTime;
import java.util.List;

/**
* Type representing BigDataPoolResourceInfo.
Expand All @@ -39,6 +40,11 @@ public interface BigDataPoolResourceInfo extends HasInner<BigDataPoolResourceInf
*/
DateTime creationDate();

/**
* @return the customLibraries value.
*/
List<LibraryInfo> customLibraries();

/**
* @return the defaultSparkLogFolder value.
*/
Expand Down Expand Up @@ -89,6 +95,16 @@ public interface BigDataPoolResourceInfo extends HasInner<BigDataPoolResourceInf
*/
String provisioningState();

/**
* @return the sessionLevelPackagesEnabled value.
*/
Boolean sessionLevelPackagesEnabled();

/**
* @return the sparkConfigProperties value.
*/
LibraryRequirements sparkConfigProperties();

/**
* @return the sparkEventsFolder value.
*/
Expand Down Expand Up @@ -198,6 +214,18 @@ interface WithCreationDate {
WithCreate withCreationDate(DateTime creationDate);
}

/**
* The stage of the bigdatapoolresourceinfo definition allowing to specify CustomLibraries.
*/
interface WithCustomLibraries {
/**
* Specifies customLibraries.
* @param customLibraries List of custom libraries/packages associated with the spark pool
* @return the next definition stage
*/
WithCreate withCustomLibraries(List<LibraryInfo> customLibraries);
}

/**
* The stage of the bigdatapoolresourceinfo definition allowing to specify DefaultSparkLogFolder.
*/
Expand Down Expand Up @@ -252,7 +280,7 @@ interface WithNodeCount {
interface WithNodeSize {
/**
* Specifies nodeSize.
* @param nodeSize The level of compute power that each node in the Big Data pool has. Possible values include: 'None', 'Small', 'Medium', 'Large', 'XLarge', 'XXLarge'
* @param nodeSize The level of compute power that each node in the Big Data pool has. Possible values include: 'None', 'Small', 'Medium', 'Large', 'XLarge', 'XXLarge', 'XXXLarge'
* @return the next definition stage
*/
WithCreate withNodeSize(NodeSize nodeSize);
Expand Down Expand Up @@ -282,6 +310,30 @@ interface WithProvisioningState {
WithCreate withProvisioningState(String provisioningState);
}

/**
* The stage of the bigdatapoolresourceinfo definition allowing to specify SessionLevelPackagesEnabled.
*/
interface WithSessionLevelPackagesEnabled {
/**
* Specifies sessionLevelPackagesEnabled.
* @param sessionLevelPackagesEnabled Whether session level library/package management is enabled or not
* @return the next definition stage
*/
WithCreate withSessionLevelPackagesEnabled(Boolean sessionLevelPackagesEnabled);
}

/**
* The stage of the bigdatapoolresourceinfo definition allowing to specify SparkConfigProperties.
*/
interface WithSparkConfigProperties {
/**
* Specifies sparkConfigProperties.
* @param sparkConfigProperties Spark configuration file to specify additional properties
* @return the next definition stage
*/
WithCreate withSparkConfigProperties(LibraryRequirements sparkConfigProperties);
}

/**
* The stage of the bigdatapoolresourceinfo definition allowing to specify SparkEventsFolder.
*/
Expand Down Expand Up @@ -323,13 +375,13 @@ interface WithTags {
* the resource to be created (via {@link WithCreate#create()}), but also allows
* for any other optional settings to be specified.
*/
interface WithCreate extends Creatable<BigDataPoolResourceInfo>, DefinitionStages.WithAutoPause, DefinitionStages.WithAutoScale, DefinitionStages.WithCreationDate, DefinitionStages.WithDefaultSparkLogFolder, DefinitionStages.WithIsComputeIsolationEnabled, DefinitionStages.WithLibraryRequirements, DefinitionStages.WithNodeCount, DefinitionStages.WithNodeSize, DefinitionStages.WithNodeSizeFamily, DefinitionStages.WithProvisioningState, DefinitionStages.WithSparkEventsFolder, DefinitionStages.WithSparkVersion, DefinitionStages.WithTags {
interface WithCreate extends Creatable<BigDataPoolResourceInfo>, DefinitionStages.WithAutoPause, DefinitionStages.WithAutoScale, DefinitionStages.WithCreationDate, DefinitionStages.WithCustomLibraries, DefinitionStages.WithDefaultSparkLogFolder, DefinitionStages.WithIsComputeIsolationEnabled, DefinitionStages.WithLibraryRequirements, DefinitionStages.WithNodeCount, DefinitionStages.WithNodeSize, DefinitionStages.WithNodeSizeFamily, DefinitionStages.WithProvisioningState, DefinitionStages.WithSessionLevelPackagesEnabled, DefinitionStages.WithSparkConfigProperties, DefinitionStages.WithSparkEventsFolder, DefinitionStages.WithSparkVersion, DefinitionStages.WithTags {
}
}
/**
* The template for a BigDataPoolResourceInfo update operation, containing all the settings that can be modified.
*/
interface Update extends Appliable<BigDataPoolResourceInfo>, UpdateStages.WithForce, UpdateStages.WithAutoPause, UpdateStages.WithAutoScale, UpdateStages.WithCreationDate, UpdateStages.WithDefaultSparkLogFolder, UpdateStages.WithIsComputeIsolationEnabled, UpdateStages.WithLibraryRequirements, UpdateStages.WithNodeCount, UpdateStages.WithNodeSize, UpdateStages.WithNodeSizeFamily, UpdateStages.WithProvisioningState, UpdateStages.WithSparkEventsFolder, UpdateStages.WithSparkVersion, UpdateStages.WithTags {
interface Update extends Appliable<BigDataPoolResourceInfo>, UpdateStages.WithForce, UpdateStages.WithAutoPause, UpdateStages.WithAutoScale, UpdateStages.WithCreationDate, UpdateStages.WithCustomLibraries, UpdateStages.WithDefaultSparkLogFolder, UpdateStages.WithIsComputeIsolationEnabled, UpdateStages.WithLibraryRequirements, UpdateStages.WithNodeCount, UpdateStages.WithNodeSize, UpdateStages.WithNodeSizeFamily, UpdateStages.WithProvisioningState, UpdateStages.WithSessionLevelPackagesEnabled, UpdateStages.WithSparkConfigProperties, UpdateStages.WithSparkEventsFolder, UpdateStages.WithSparkVersion, UpdateStages.WithTags {
}

/**
Expand Down Expand Up @@ -384,6 +436,18 @@ interface WithCreationDate {
Update withCreationDate(DateTime creationDate);
}

/**
* The stage of the bigdatapoolresourceinfo update allowing to specify CustomLibraries.
*/
interface WithCustomLibraries {
/**
* Specifies customLibraries.
* @param customLibraries List of custom libraries/packages associated with the spark pool
* @return the next update stage
*/
Update withCustomLibraries(List<LibraryInfo> customLibraries);
}

/**
* The stage of the bigdatapoolresourceinfo update allowing to specify DefaultSparkLogFolder.
*/
Expand Down Expand Up @@ -438,7 +502,7 @@ interface WithNodeCount {
interface WithNodeSize {
/**
* Specifies nodeSize.
* @param nodeSize The level of compute power that each node in the Big Data pool has. Possible values include: 'None', 'Small', 'Medium', 'Large', 'XLarge', 'XXLarge'
* @param nodeSize The level of compute power that each node in the Big Data pool has. Possible values include: 'None', 'Small', 'Medium', 'Large', 'XLarge', 'XXLarge', 'XXXLarge'
* @return the next update stage
*/
Update withNodeSize(NodeSize nodeSize);
Expand Down Expand Up @@ -468,6 +532,30 @@ interface WithProvisioningState {
Update withProvisioningState(String provisioningState);
}

/**
* The stage of the bigdatapoolresourceinfo update allowing to specify SessionLevelPackagesEnabled.
*/
interface WithSessionLevelPackagesEnabled {
/**
* Specifies sessionLevelPackagesEnabled.
* @param sessionLevelPackagesEnabled Whether session level library/package management is enabled or not
* @return the next update stage
*/
Update withSessionLevelPackagesEnabled(Boolean sessionLevelPackagesEnabled);
}

/**
* The stage of the bigdatapoolresourceinfo update allowing to specify SparkConfigProperties.
*/
interface WithSparkConfigProperties {
/**
* Specifies sparkConfigProperties.
* @param sparkConfigProperties Spark configuration file to specify additional properties
* @return the next update stage
*/
Update withSparkConfigProperties(LibraryRequirements sparkConfigProperties);
}

/**
* The stage of the bigdatapoolresourceinfo update allowing to specify SparkEventsFolder.
*/
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/

package com.microsoft.azure.management.synapse.v2019_06_01_preview;

import com.fasterxml.jackson.annotation.JsonProperty;

/**
* Details of the customer managed key associated with the workspace.
*/
/**
 * Details of the customer managed key associated with the workspace.
 * <p>
 * The {@code status} field is server-populated (write-only for clients);
 * only the {@code key} object may be set by the caller.
 */
public class CustomerManagedKeyDetails {
    /**
     * The customer managed key status on the workspace.
     */
    @JsonProperty(value = "status", access = JsonProperty.Access.WRITE_ONLY)
    private String status;

    /**
     * The key object of the workspace.
     */
    @JsonProperty(value = "key")
    private WorkspaceKeyDetails key;

    /**
     * Get the customer managed key status reported by the service.
     *
     * @return the status value
     */
    public String status() {
        return status;
    }

    /**
     * Get the workspace key object.
     *
     * @return the key value
     */
    public WorkspaceKeyDetails key() {
        return key;
    }

    /**
     * Set the workspace key object, fluent-style.
     *
     * @param key the key value to set
     * @return the CustomerManagedKeyDetails object itself.
     */
    public CustomerManagedKeyDetails withKey(WorkspaceKeyDetails key) {
        this.key = key;
        return this;
    }

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/

package com.microsoft.azure.management.synapse.v2019_06_01_preview;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;

/**
* Defines values for DataMaskingFunction.
*/
/**
 * Defines values for DataMaskingFunction.
 * <p>
 * Each constant carries the exact string emitted on the wire; serialization
 * uses {@link #toString()} ({@code @JsonValue}) and deserialization uses
 * {@link #fromString(String)} ({@code @JsonCreator}).
 */
public enum DataMaskingFunction {
    /** Enum value Default. */
    DEFAULT("Default"),

    /** Enum value CCN. */
    CCN("CCN"),

    /** Enum value Email. */
    EMAIL("Email"),

    /** Enum value Number. */
    NUMBER("Number"),

    /** Enum value SSN. */
    SSN("SSN"),

    /** Enum value Text. */
    TEXT("Text");

    /** The actual serialized value for a DataMaskingFunction instance. */
    // final: enum constant state must be immutable once constructed.
    private final String value;

    DataMaskingFunction(String value) {
        this.value = value;
    }

    /**
     * Parses a serialized value to a DataMaskingFunction instance.
     * Matching is case-insensitive to tolerate casing differences from the
     * service.
     *
     * @param value the serialized value to parse.
     * @return the parsed DataMaskingFunction object, or null if unable to parse.
     */
    @JsonCreator
    public static DataMaskingFunction fromString(String value) {
        for (DataMaskingFunction item : DataMaskingFunction.values()) {
            if (item.toString().equalsIgnoreCase(value)) {
                return item;
            }
        }
        return null;
    }

    @JsonValue
    @Override
    public String toString() {
        return this.value;
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/

package com.microsoft.azure.management.synapse.v2019_06_01_preview;

import com.microsoft.azure.arm.collection.SupportsCreating;
import rx.Observable;
import com.microsoft.azure.management.synapse.v2019_06_01_preview.implementation.DataMaskingPoliciesInner;
import com.microsoft.azure.arm.model.HasInner;

/**
* Type representing DataMaskingPolicies.
*/
/**
 * Type representing DataMaskingPolicies.
 * <p>
 * Collection entry point for the data masking policy of a SQL pool: supports
 * creating a policy (via {@code SupportsCreating}) and retrieving the existing
 * one asynchronously. Wraps the inner client {@code DataMaskingPoliciesInner}.
 */
public interface DataMaskingPolicies extends SupportsCreating<DataMaskingPolicy.DefinitionStages.Blank>, HasInner<DataMaskingPoliciesInner> {
    /**
     * Gets a Sql pool data masking policy.
     * <p>
     * The returned {@code Observable} emits the policy when the service call
     * completes; no request is made until it is subscribed to.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace
     * @param sqlPoolName SQL pool name
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    Observable<DataMaskingPolicy> getAsync(String resourceGroupName, String workspaceName, String sqlPoolName);

}
Loading