diff --git a/profiles/latest/datafactory/mgmt/datafactory/models.go b/profiles/latest/datafactory/mgmt/datafactory/models.go
index 1b22ccf44bf2..9f2331a40b3f 100644
--- a/profiles/latest/datafactory/mgmt/datafactory/models.go
+++ b/profiles/latest/datafactory/mgmt/datafactory/models.go
@@ -97,6 +97,14 @@ const (
 	TWO CassandraSourceReadConsistencyLevels = original.TWO
 )
 
+type DataFlowComputeType = original.DataFlowComputeType
+
+const (
+	ComputeOptimized DataFlowComputeType = original.ComputeOptimized
+	General          DataFlowComputeType = original.General
+	MemoryOptimized  DataFlowComputeType = original.MemoryOptimized
+)
+
 type DataFlowDebugCommandType = original.DataFlowDebugCommandType
 
 const (
@@ -1369,6 +1377,8 @@ type DataFlowDebugCommandResponse = original.DataFlowDebugCommandResponse
 type DataFlowDebugPackage = original.DataFlowDebugPackage
 type DataFlowDebugPackageDebugSettings = original.DataFlowDebugPackageDebugSettings
 type DataFlowDebugSessionClient = original.DataFlowDebugSessionClient
+type DataFlowDebugSessionCreateFuture = original.DataFlowDebugSessionCreateFuture
+type DataFlowDebugSessionExecuteCommandFuture = original.DataFlowDebugSessionExecuteCommandFuture
 type DataFlowDebugSessionInfo = original.DataFlowDebugSessionInfo
 type DataFlowFolder = original.DataFlowFolder
 type DataFlowListResponse = original.DataFlowListResponse
@@ -1573,6 +1583,7 @@ type IntegrationRuntimeAuthKeys = original.IntegrationRuntimeAuthKeys
 type IntegrationRuntimeComputeProperties = original.IntegrationRuntimeComputeProperties
 type IntegrationRuntimeConnectionInfo = original.IntegrationRuntimeConnectionInfo
 type IntegrationRuntimeCustomSetupScriptProperties = original.IntegrationRuntimeCustomSetupScriptProperties
+type IntegrationRuntimeDataFlowProperties = original.IntegrationRuntimeDataFlowProperties
 type IntegrationRuntimeDataProxyProperties = original.IntegrationRuntimeDataProxyProperties
 type IntegrationRuntimeListResponse = original.IntegrationRuntimeListResponse
 type IntegrationRuntimeListResponseIterator = original.IntegrationRuntimeListResponseIterator
@@ -2166,6 +2177,9 @@ func PossibleBlobEventTypesValues() []BlobEventTypes {
 func PossibleCassandraSourceReadConsistencyLevelsValues() []CassandraSourceReadConsistencyLevels {
 	return original.PossibleCassandraSourceReadConsistencyLevelsValues()
 }
+func PossibleDataFlowComputeTypeValues() []DataFlowComputeType {
+	return original.PossibleDataFlowComputeTypeValues()
+}
 func PossibleDataFlowDebugCommandTypeValues() []DataFlowDebugCommandType {
 	return original.PossibleDataFlowDebugCommandTypeValues()
 }
diff --git a/profiles/preview/datafactory/mgmt/datafactory/models.go b/profiles/preview/datafactory/mgmt/datafactory/models.go
index ec5dd916c714..fd2334ff249f 100644
--- a/profiles/preview/datafactory/mgmt/datafactory/models.go
+++ b/profiles/preview/datafactory/mgmt/datafactory/models.go
@@ -97,6 +97,14 @@ const (
 	TWO CassandraSourceReadConsistencyLevels = original.TWO
 )
 
+type DataFlowComputeType = original.DataFlowComputeType
+
+const (
+	ComputeOptimized DataFlowComputeType = original.ComputeOptimized
+	General          DataFlowComputeType = original.General
+	MemoryOptimized  DataFlowComputeType = original.MemoryOptimized
+)
+
 type DataFlowDebugCommandType = original.DataFlowDebugCommandType
 
 const (
@@ -1369,6 +1377,8 @@ type DataFlowDebugCommandResponse = original.DataFlowDebugCommandResponse
 type DataFlowDebugPackage = original.DataFlowDebugPackage
 type DataFlowDebugPackageDebugSettings = original.DataFlowDebugPackageDebugSettings
 type DataFlowDebugSessionClient = original.DataFlowDebugSessionClient
+type DataFlowDebugSessionCreateFuture = original.DataFlowDebugSessionCreateFuture
+type DataFlowDebugSessionExecuteCommandFuture = original.DataFlowDebugSessionExecuteCommandFuture
 type DataFlowDebugSessionInfo = original.DataFlowDebugSessionInfo
 type DataFlowFolder = original.DataFlowFolder
 type DataFlowListResponse = original.DataFlowListResponse
@@ -1573,6 +1583,7 @@ type IntegrationRuntimeAuthKeys = original.IntegrationRuntimeAuthKeys
 type IntegrationRuntimeComputeProperties = original.IntegrationRuntimeComputeProperties
 type IntegrationRuntimeConnectionInfo = original.IntegrationRuntimeConnectionInfo
 type IntegrationRuntimeCustomSetupScriptProperties = original.IntegrationRuntimeCustomSetupScriptProperties
+type IntegrationRuntimeDataFlowProperties = original.IntegrationRuntimeDataFlowProperties
 type IntegrationRuntimeDataProxyProperties = original.IntegrationRuntimeDataProxyProperties
 type IntegrationRuntimeListResponse = original.IntegrationRuntimeListResponse
 type IntegrationRuntimeListResponseIterator = original.IntegrationRuntimeListResponseIterator
@@ -2166,6 +2177,9 @@ func PossibleBlobEventTypesValues() []BlobEventTypes {
 func PossibleCassandraSourceReadConsistencyLevelsValues() []CassandraSourceReadConsistencyLevels {
 	return original.PossibleCassandraSourceReadConsistencyLevelsValues()
 }
+func PossibleDataFlowComputeTypeValues() []DataFlowComputeType {
+	return original.PossibleDataFlowComputeTypeValues()
+}
 func PossibleDataFlowDebugCommandTypeValues() []DataFlowDebugCommandType {
 	return original.PossibleDataFlowDebugCommandTypeValues()
 }
diff --git a/services/datafactory/mgmt/2018-06-01/datafactory/datafactoryapi/interfaces.go b/services/datafactory/mgmt/2018-06-01/datafactory/datafactoryapi/interfaces.go
index 04424a7947a0..cf9ac665bd95 100644
--- a/services/datafactory/mgmt/2018-06-01/datafactory/datafactoryapi/interfaces.go
+++ b/services/datafactory/mgmt/2018-06-01/datafactory/datafactoryapi/interfaces.go
@@ -187,9 +187,9 @@ var _ DataFlowsClientAPI = (*datafactory.DataFlowsClient)(nil)
 
 // DataFlowDebugSessionClientAPI contains the set of methods on the DataFlowDebugSessionClient type.
 type DataFlowDebugSessionClientAPI interface {
 	AddDataFlow(ctx context.Context, resourceGroupName string, factoryName string, request datafactory.DataFlowDebugPackage) (result datafactory.AddDataFlowToDebugSessionResponse, err error)
-	Create(ctx context.Context, resourceGroupName string, factoryName string, request datafactory.CreateDataFlowDebugSessionRequest) (result datafactory.CreateDataFlowDebugSessionResponse, err error)
+	Create(ctx context.Context, resourceGroupName string, factoryName string, request datafactory.CreateDataFlowDebugSessionRequest) (result datafactory.DataFlowDebugSessionCreateFuture, err error)
 	Delete(ctx context.Context, resourceGroupName string, factoryName string, request datafactory.DeleteDataFlowDebugSessionRequest) (result autorest.Response, err error)
-	ExecuteCommand(ctx context.Context, resourceGroupName string, factoryName string, request datafactory.DataFlowDebugCommandRequest) (result datafactory.DataFlowDebugCommandResponse, err error)
+	ExecuteCommand(ctx context.Context, resourceGroupName string, factoryName string, request datafactory.DataFlowDebugCommandRequest) (result datafactory.DataFlowDebugSessionExecuteCommandFuture, err error)
 	QueryByFactory(ctx context.Context, resourceGroupName string, factoryName string) (result datafactory.QueryDataFlowDebugSessionsResponsePage, err error)
 }
diff --git a/services/datafactory/mgmt/2018-06-01/datafactory/dataflowdebugsession.go b/services/datafactory/mgmt/2018-06-01/datafactory/dataflowdebugsession.go
index 02ccaf45c878..91bab8cfe2e1 100644
--- a/services/datafactory/mgmt/2018-06-01/datafactory/dataflowdebugsession.go
+++ b/services/datafactory/mgmt/2018-06-01/datafactory/dataflowdebugsession.go
@@ -148,13 +148,13 @@ func (client DataFlowDebugSessionClient) AddDataFlowResponder(resp *http.Respons
 // resourceGroupName - the resource group name.
 // factoryName - the factory name.
 // request - data flow debug session definition
-func (client DataFlowDebugSessionClient) Create(ctx context.Context, resourceGroupName string, factoryName string, request CreateDataFlowDebugSessionRequest) (result CreateDataFlowDebugSessionResponse, err error) {
+func (client DataFlowDebugSessionClient) Create(ctx context.Context, resourceGroupName string, factoryName string, request CreateDataFlowDebugSessionRequest) (result DataFlowDebugSessionCreateFuture, err error) {
 	if tracing.IsEnabled() {
 		ctx = tracing.StartSpan(ctx, fqdn+"/DataFlowDebugSessionClient.Create")
 		defer func() {
 			sc := -1
-			if result.Response.Response != nil {
-				sc = result.Response.Response.StatusCode
+			if result.Response() != nil {
+				sc = result.Response().StatusCode
 			}
 			tracing.EndSpan(ctx, sc, err)
 		}()
@@ -180,18 +180,12 @@ func (client DataFlowDebugSessionClient) Create(ctx context.Context, resourceGro
 		return
 	}
 
-	resp, err := client.CreateSender(req)
+	result, err = client.CreateSender(req)
 	if err != nil {
-		result.Response = autorest.Response{Response: resp}
-		err = autorest.NewErrorWithError(err, "datafactory.DataFlowDebugSessionClient", "Create", resp, "Failure sending request")
+		err = autorest.NewErrorWithError(err, "datafactory.DataFlowDebugSessionClient", "Create", result.Response(), "Failure sending request")
 		return
 	}
 
-	result, err = client.CreateResponder(resp)
-	if err != nil {
-		err = autorest.NewErrorWithError(err, "datafactory.DataFlowDebugSessionClient", "Create", resp, "Failure responding to request")
-	}
-
 	return
 }
 
@@ -220,9 +214,15 @@ func (client DataFlowDebugSessionClient) CreatePreparer(ctx context.Context, res
 
 // CreateSender sends the Create request. The method will close the
 // http.Response Body if it receives an error.
-func (client DataFlowDebugSessionClient) CreateSender(req *http.Request) (*http.Response, error) {
+func (client DataFlowDebugSessionClient) CreateSender(req *http.Request) (future DataFlowDebugSessionCreateFuture, err error) {
 	sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
-	return autorest.SendWithSender(client, req, sd...)
+	var resp *http.Response
+	resp, err = autorest.SendWithSender(client, req, sd...)
+	if err != nil {
+		return
+	}
+	future.Future, err = azure.NewFutureFromResponse(resp)
+	return
 }
 
 // CreateResponder handles the response to the Create request. The method always
@@ -334,13 +334,13 @@ func (client DataFlowDebugSessionClient) DeleteResponder(resp *http.Response) (r
 // resourceGroupName - the resource group name.
 // factoryName - the factory name.
 // request - data flow debug command definition.
-func (client DataFlowDebugSessionClient) ExecuteCommand(ctx context.Context, resourceGroupName string, factoryName string, request DataFlowDebugCommandRequest) (result DataFlowDebugCommandResponse, err error) {
+func (client DataFlowDebugSessionClient) ExecuteCommand(ctx context.Context, resourceGroupName string, factoryName string, request DataFlowDebugCommandRequest) (result DataFlowDebugSessionExecuteCommandFuture, err error) {
 	if tracing.IsEnabled() {
 		ctx = tracing.StartSpan(ctx, fqdn+"/DataFlowDebugSessionClient.ExecuteCommand")
 		defer func() {
 			sc := -1
-			if result.Response.Response != nil {
-				sc = result.Response.Response.StatusCode
+			if result.Response() != nil {
+				sc = result.Response().StatusCode
 			}
 			tracing.EndSpan(ctx, sc, err)
 		}()
@@ -366,18 +366,12 @@ func (client DataFlowDebugSessionClient) ExecuteCommand(ctx context.Context, res
 		return
 	}
 
-	resp, err := client.ExecuteCommandSender(req)
+	result, err = client.ExecuteCommandSender(req)
 	if err != nil {
-		result.Response = autorest.Response{Response: resp}
-		err = autorest.NewErrorWithError(err, "datafactory.DataFlowDebugSessionClient", "ExecuteCommand", resp, "Failure sending request")
+		err = autorest.NewErrorWithError(err, "datafactory.DataFlowDebugSessionClient", "ExecuteCommand", result.Response(), "Failure sending request")
 		return
 	}
 
-	result, err = client.ExecuteCommandResponder(resp)
-	if err != nil {
-		err = autorest.NewErrorWithError(err, "datafactory.DataFlowDebugSessionClient", "ExecuteCommand", resp, "Failure responding to request")
-	}
-
 	return
 }
 
@@ -406,9 +400,15 @@ func (client DataFlowDebugSessionClient) ExecuteCommandPreparer(ctx context.Cont
 
 // ExecuteCommandSender sends the ExecuteCommand request. The method will close the
 // http.Response Body if it receives an error.
-func (client DataFlowDebugSessionClient) ExecuteCommandSender(req *http.Request) (*http.Response, error) {
+func (client DataFlowDebugSessionClient) ExecuteCommandSender(req *http.Request) (future DataFlowDebugSessionExecuteCommandFuture, err error) {
 	sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
-	return autorest.SendWithSender(client, req, sd...)
+	var resp *http.Response
+	resp, err = autorest.SendWithSender(client, req, sd...)
+	if err != nil {
+		return
+	}
+	future.Future, err = azure.NewFutureFromResponse(resp)
+	return
 }
 
 // ExecuteCommandResponder handles the response to the ExecuteCommand request. The method always
diff --git a/services/datafactory/mgmt/2018-06-01/datafactory/models.go b/services/datafactory/mgmt/2018-06-01/datafactory/models.go
index 6073007fa7d8..a7f764d671c7 100644
--- a/services/datafactory/mgmt/2018-06-01/datafactory/models.go
+++ b/services/datafactory/mgmt/2018-06-01/datafactory/models.go
@@ -175,6 +175,23 @@ func PossibleCassandraSourceReadConsistencyLevelsValues() []CassandraSourceReadC
 	return []CassandraSourceReadConsistencyLevels{ALL, EACHQUORUM, LOCALONE, LOCALQUORUM, LOCALSERIAL, ONE, QUORUM, SERIAL, THREE, TWO}
 }
 
+// DataFlowComputeType enumerates the values for data flow compute type.
+type DataFlowComputeType string
+
+const (
+	// ComputeOptimized ...
+	ComputeOptimized DataFlowComputeType = "ComputeOptimized"
+	// General ...
+	General DataFlowComputeType = "General"
+	// MemoryOptimized ...
+	MemoryOptimized DataFlowComputeType = "MemoryOptimized"
+)
+
+// PossibleDataFlowComputeTypeValues returns an array of possible values for the DataFlowComputeType const type.
+func PossibleDataFlowComputeTypeValues() []DataFlowComputeType {
+	return []DataFlowComputeType{ComputeOptimized, General, MemoryOptimized}
+}
+
 // DataFlowDebugCommandType enumerates the values for data flow debug command type.
 type DataFlowDebugCommandType string
 
@@ -62253,6 +62270,64 @@ func (dfdpS DataFlowDebugPackageDebugSettings) MarshalJSON() ([]byte, error) {
 	return json.Marshal(objectMap)
 }
 
+// DataFlowDebugSessionCreateFuture an abstraction for monitoring and retrieving the results of a
+// long-running operation.
+type DataFlowDebugSessionCreateFuture struct {
+	azure.Future
+}
+
+// Result returns the result of the asynchronous operation.
+// If the operation has not completed it will return an error.
+func (future *DataFlowDebugSessionCreateFuture) Result(client DataFlowDebugSessionClient) (cdfdsr CreateDataFlowDebugSessionResponse, err error) {
+	var done bool
+	done, err = future.DoneWithContext(context.Background(), client)
+	if err != nil {
+		err = autorest.NewErrorWithError(err, "datafactory.DataFlowDebugSessionCreateFuture", "Result", future.Response(), "Polling failure")
+		return
+	}
+	if !done {
+		err = azure.NewAsyncOpIncompleteError("datafactory.DataFlowDebugSessionCreateFuture")
+		return
+	}
+	sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
+	if cdfdsr.Response.Response, err = future.GetResult(sender); err == nil && cdfdsr.Response.Response.StatusCode != http.StatusNoContent {
+		cdfdsr, err = client.CreateResponder(cdfdsr.Response.Response)
+		if err != nil {
+			err = autorest.NewErrorWithError(err, "datafactory.DataFlowDebugSessionCreateFuture", "Result", cdfdsr.Response.Response, "Failure responding to request")
+		}
+	}
+	return
+}
+
+// DataFlowDebugSessionExecuteCommandFuture an abstraction for monitoring and retrieving the results of a
+// long-running operation.
+type DataFlowDebugSessionExecuteCommandFuture struct {
+	azure.Future
+}
+
+// Result returns the result of the asynchronous operation.
+// If the operation has not completed it will return an error.
+func (future *DataFlowDebugSessionExecuteCommandFuture) Result(client DataFlowDebugSessionClient) (dfdcr DataFlowDebugCommandResponse, err error) {
+	var done bool
+	done, err = future.DoneWithContext(context.Background(), client)
+	if err != nil {
+		err = autorest.NewErrorWithError(err, "datafactory.DataFlowDebugSessionExecuteCommandFuture", "Result", future.Response(), "Polling failure")
+		return
+	}
+	if !done {
+		err = azure.NewAsyncOpIncompleteError("datafactory.DataFlowDebugSessionExecuteCommandFuture")
+		return
+	}
+	sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
+	if dfdcr.Response.Response, err = future.GetResult(sender); err == nil && dfdcr.Response.Response.StatusCode != http.StatusNoContent {
+		dfdcr, err = client.ExecuteCommandResponder(dfdcr.Response.Response)
+		if err != nil {
+			err = autorest.NewErrorWithError(err, "datafactory.DataFlowDebugSessionExecuteCommandFuture", "Result", dfdcr.Response.Response, "Failure responding to request")
+		}
+	}
+	return
+}
+
 // DataFlowDebugSessionInfo data flow debug session info.
 type DataFlowDebugSessionInfo struct {
 	// AdditionalProperties - Unmatched properties from the message are deserialized this collection
@@ -111504,6 +111579,8 @@ type IntegrationRuntimeComputeProperties struct {
 	NumberOfNodes *int32 `json:"numberOfNodes,omitempty"`
 	// MaxParallelExecutionsPerNode - Maximum parallel executions count per node for managed integration runtime.
 	MaxParallelExecutionsPerNode *int32 `json:"maxParallelExecutionsPerNode,omitempty"`
+	// DataFlowProperties - Data flow properties for managed integration runtime.
+	DataFlowProperties *IntegrationRuntimeDataFlowProperties `json:"dataFlowProperties,omitempty"`
 	// VNetProperties - VNet properties for managed integration runtime.
 	VNetProperties *IntegrationRuntimeVNetProperties `json:"vNetProperties,omitempty"`
 }
@@ -111523,6 +111600,9 @@ func (ircp IntegrationRuntimeComputeProperties) MarshalJSON() ([]byte, error) {
 	if ircp.MaxParallelExecutionsPerNode != nil {
 		objectMap["maxParallelExecutionsPerNode"] = ircp.MaxParallelExecutionsPerNode
 	}
+	if ircp.DataFlowProperties != nil {
+		objectMap["dataFlowProperties"] = ircp.DataFlowProperties
+	}
 	if ircp.VNetProperties != nil {
 		objectMap["vNetProperties"] = ircp.VNetProperties
 	}
@@ -111589,6 +111669,15 @@ func (ircp *IntegrationRuntimeComputeProperties) UnmarshalJSON(body []byte) erro
 			}
 			ircp.MaxParallelExecutionsPerNode = &maxParallelExecutionsPerNode
 		}
+	case "dataFlowProperties":
+		if v != nil {
+			var dataFlowProperties IntegrationRuntimeDataFlowProperties
+			err = json.Unmarshal(*v, &dataFlowProperties)
+			if err != nil {
+				return err
+			}
+			ircp.DataFlowProperties = &dataFlowProperties
+		}
 	case "vNetProperties":
 		if v != nil {
 			var vNetProperties IntegrationRuntimeVNetProperties
@@ -111723,6 +111812,90 @@ type IntegrationRuntimeCustomSetupScriptProperties struct {
 	SasToken *SecureString `json:"sasToken,omitempty"`
 }
 
+// IntegrationRuntimeDataFlowProperties data flow properties for managed integration runtime.
+type IntegrationRuntimeDataFlowProperties struct {
+	// AdditionalProperties - Unmatched properties from the message are deserialized this collection
+	AdditionalProperties map[string]interface{} `json:""`
+	// ComputeType - Compute type of the cluster which will execute data flow job. Possible values include: 'General', 'MemoryOptimized', 'ComputeOptimized'
+	ComputeType DataFlowComputeType `json:"computeType,omitempty"`
+	// CoreCount - Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272.
+	CoreCount *int32 `json:"coreCount,omitempty"`
+	// TimeToLive - Time to live (in minutes) setting of the cluster which will execute data flow job.
+	TimeToLive *int32 `json:"timeToLive,omitempty"`
+}
+
+// MarshalJSON is the custom marshaler for IntegrationRuntimeDataFlowProperties.
+func (irdfp IntegrationRuntimeDataFlowProperties) MarshalJSON() ([]byte, error) {
+	objectMap := make(map[string]interface{})
+	if irdfp.ComputeType != "" {
+		objectMap["computeType"] = irdfp.ComputeType
+	}
+	if irdfp.CoreCount != nil {
+		objectMap["coreCount"] = irdfp.CoreCount
+	}
+	if irdfp.TimeToLive != nil {
+		objectMap["timeToLive"] = irdfp.TimeToLive
+	}
+	for k, v := range irdfp.AdditionalProperties {
+		objectMap[k] = v
+	}
+	return json.Marshal(objectMap)
+}
+
+// UnmarshalJSON is the custom unmarshaler for IntegrationRuntimeDataFlowProperties struct.
+func (irdfp *IntegrationRuntimeDataFlowProperties) UnmarshalJSON(body []byte) error {
+	var m map[string]*json.RawMessage
+	err := json.Unmarshal(body, &m)
+	if err != nil {
+		return err
+	}
+	for k, v := range m {
+		switch k {
+		default:
+			if v != nil {
+				var additionalProperties interface{}
+				err = json.Unmarshal(*v, &additionalProperties)
+				if err != nil {
+					return err
+				}
+				if irdfp.AdditionalProperties == nil {
+					irdfp.AdditionalProperties = make(map[string]interface{})
+				}
+				irdfp.AdditionalProperties[k] = additionalProperties
+			}
+		case "computeType":
+			if v != nil {
+				var computeType DataFlowComputeType
+				err = json.Unmarshal(*v, &computeType)
+				if err != nil {
+					return err
+				}
+				irdfp.ComputeType = computeType
+			}
+		case "coreCount":
+			if v != nil {
+				var coreCount int32
+				err = json.Unmarshal(*v, &coreCount)
+				if err != nil {
+					return err
+				}
+				irdfp.CoreCount = &coreCount
+			}
+		case "timeToLive":
+			if v != nil {
+				var timeToLive int32
+				err = json.Unmarshal(*v, &timeToLive)
+				if err != nil {
+					return err
+				}
+				irdfp.TimeToLive = &timeToLive
+			}
+		}
+	}
+
+	return nil
+}
+
 // IntegrationRuntimeDataProxyProperties data proxy properties for a managed dedicated integration runtime.
 type IntegrationRuntimeDataProxyProperties struct {
 	// ConnectVia - The self-hosted integration runtime reference.
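
Usage note (not part of the generated diff): because Create and ExecuteCommand now return futures instead of the response types, callers poll the future and then pull the typed result from it. A minimal sketch of that flow, assuming an already-configured DataFlowDebugSessionClient and a populated request; the resource group and factory names are placeholders:

package example

import (
	"context"
	"fmt"

	"github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory"
)

// createDebugSession illustrates the long-running-operation flow for
// DataFlowDebugSessionClient.Create after this change.
func createDebugSession(ctx context.Context, client datafactory.DataFlowDebugSessionClient, req datafactory.CreateDataFlowDebugSessionRequest) error {
	// Create now returns a DataFlowDebugSessionCreateFuture rather than the
	// response body; "myResourceGroup" and "myFactory" are placeholder names.
	future, err := client.Create(ctx, "myResourceGroup", "myFactory", req)
	if err != nil {
		return err
	}
	// Block until the service reports the operation as finished (or ctx is cancelled).
	if err := future.WaitForCompletionRef(ctx, client.Client); err != nil {
		return err
	}
	// Result runs CreateResponder on the final response to produce the typed payload.
	resp, err := future.Result(client)
	if err != nil {
		return err
	}
	fmt.Println("debug session created, HTTP status:", resp.Response.Response.StatusCode)
	return nil
}

The same pattern applies to ExecuteCommand and its DataFlowDebugSessionExecuteCommandFuture.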