From 66b7f86cb39aad89c96a8882180ab8b223d4799a Mon Sep 17 00:00:00 2001 From: tombuildsstuff Date: Wed, 10 Apr 2019 12:47:35 +0200 Subject: [PATCH 1/4] r/stream_analytics: vendoring the sdk, registering the client/RP --- azurerm/config.go | 34 +- azurerm/required_resource_providers.go | 1 + variables.tf | 7 - .../mgmt/2016-03-01/streamanalytics/client.go | 51 + .../2016-03-01/streamanalytics/functions.go | 689 +++ .../mgmt/2016-03-01/streamanalytics/inputs.go | 600 ++ .../mgmt/2016-03-01/streamanalytics/models.go | 5277 +++++++++++++++++ .../2016-03-01/streamanalytics/operations.go | 147 + .../2016-03-01/streamanalytics/outputs.go | 600 ++ .../streamanalytics/streamingjobs.go | 788 +++ .../streamanalytics/subscriptions.go | 117 + .../streamanalytics/transformations.go | 311 + .../2016-03-01/streamanalytics/version.go | 30 + vendor/modules.txt | 3 +- 14 files changed, 8645 insertions(+), 10 deletions(-) delete mode 100644 variables.tf create mode 100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/client.go create mode 100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/functions.go create mode 100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/inputs.go create mode 100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/models.go create mode 100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/operations.go create mode 100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/outputs.go create mode 100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/streamingjobs.go create mode 100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/subscriptions.go create mode 
100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/transformations.go create mode 100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/version.go diff --git a/azurerm/config.go b/azurerm/config.go index 7b00a91f1c34..a5b810cf6690 100644 --- a/azurerm/config.go +++ b/azurerm/config.go @@ -66,6 +66,7 @@ import ( "github.com/Azure/azure-sdk-for-go/services/servicebus/mgmt/2017-04-01/servicebus" "github.com/Azure/azure-sdk-for-go/services/servicefabric/mgmt/2018-02-01/servicefabric" "github.com/Azure/azure-sdk-for-go/services/storage/mgmt/2018-02-01/storage" + "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics" "github.com/Azure/azure-sdk-for-go/services/trafficmanager/mgmt/2018-04-01/trafficmanager" "github.com/Azure/azure-sdk-for-go/services/web/mgmt/2018-02-01/web" @@ -324,8 +325,7 @@ type ArmClient struct { // Scheduler schedulerJobCollectionsClient scheduler.JobCollectionsClient //nolint: megacheck - - schedulerJobsClient scheduler.JobsClient //nolint: megacheck + schedulerJobsClient scheduler.JobsClient //nolint: megacheck // Search searchServicesClient search.ServicesClient @@ -353,6 +353,13 @@ type ArmClient struct { storageServiceClient storage.AccountsClient storageUsageClient storage.UsageClient + // Stream Analytics + streamAnalyticsFunctionsClient streamanalytics.FunctionsClient + streamAnalyticsJobsClient streamanalytics.StreamingJobsClient + streamAnalyticsInputsClient streamanalytics.InputsClient + streamAnalyticsOutputsClient streamanalytics.OutputsClient + streamAnalyticsTransformationsClient streamanalytics.TransformationsClient + // Traffic Manager trafficManagerGeographialHierarchiesClient trafficmanager.GeographicHierarchiesClient trafficManagerProfilesClient trafficmanager.ProfilesClient @@ -514,6 +521,7 @@ func getArmClient(c *authentication.Config, skipProviderRegistration bool, partn 
client.registerSchedulerClients(endpoint, c.SubscriptionID, auth) client.registerSignalRClients(endpoint, c.SubscriptionID, auth) client.registerStorageClients(endpoint, c.SubscriptionID, auth) + client.registerStreamAnalyticsClients(endpoint, c.SubscriptionID, auth) client.registerTrafficManagerClients(endpoint, c.SubscriptionID, auth) client.registerWebClients(endpoint, c.SubscriptionID, auth) @@ -1318,6 +1326,28 @@ func (c *ArmClient) registerStorageClients(endpoint, subscriptionId string, auth c.storageUsageClient = usageClient } +func (c *ArmClient) registerStreamAnalyticsClients(endpoint, subscriptionId string, auth autorest.Authorizer) { + functionsClient := streamanalytics.NewFunctionsClientWithBaseURI(endpoint, subscriptionId) + c.configureClient(&functionsClient.Client, auth) + c.streamAnalyticsFunctionsClient = functionsClient + + jobsClient := streamanalytics.NewStreamingJobsClientWithBaseURI(endpoint, subscriptionId) + c.configureClient(&jobsClient.Client, auth) + c.streamAnalyticsJobsClient = jobsClient + + inputsClient := streamanalytics.NewInputsClientWithBaseURI(endpoint, subscriptionId) + c.configureClient(&inputsClient.Client, auth) + c.streamAnalyticsInputsClient = inputsClient + + outputsClient := streamanalytics.NewOutputsClientWithBaseURI(endpoint, subscriptionId) + c.configureClient(&outputsClient.Client, auth) + c.streamAnalyticsOutputsClient = outputsClient + + transformationsClient := streamanalytics.NewTransformationsClientWithBaseURI(endpoint, subscriptionId) + c.configureClient(&transformationsClient.Client, auth) + c.streamAnalyticsTransformationsClient = transformationsClient +} + func (c *ArmClient) registerTrafficManagerClients(endpoint, subscriptionId string, auth autorest.Authorizer) { endpointsClient := trafficmanager.NewEndpointsClientWithBaseURI(endpoint, c.subscriptionId) c.configureClient(&endpointsClient.Client, auth) diff --git a/azurerm/required_resource_providers.go b/azurerm/required_resource_providers.go index 
cd1678d03625..5494a99d14c3 100644 --- a/azurerm/required_resource_providers.go +++ b/azurerm/required_resource_providers.go @@ -58,6 +58,7 @@ func requiredResourceProviders() map[string]struct{} { "Microsoft.ServiceFabric": {}, "Microsoft.Sql": {}, "Microsoft.Storage": {}, + "Microsoft.StreamAnalytics": {}, "Microsoft.Web": {}, } } diff --git a/variables.tf b/variables.tf deleted file mode 100644 index fcd1745ba3e7..000000000000 --- a/variables.tf +++ /dev/null @@ -1,7 +0,0 @@ -variable "prefix" { - description = "The prefix used for all resources in this example" -} - -variable "location" { - description = "The Azure location where all resources in this example should be created" -} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/client.go b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/client.go new file mode 100644 index 000000000000..273aabfe4f2e --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/client.go @@ -0,0 +1,51 @@ +// Package streamanalytics implements the Azure ARM Streamanalytics service API version 2016-03-01. +// +// Stream Analytics Client +package streamanalytics + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. 
+// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "github.com/Azure/go-autorest/autorest" +) + +const ( + // DefaultBaseURI is the default URI used for the service Streamanalytics + DefaultBaseURI = "https://management.azure.com" +) + +// BaseClient is the base client for Streamanalytics. +type BaseClient struct { + autorest.Client + BaseURI string + SubscriptionID string +} + +// New creates an instance of the BaseClient client. +func New(subscriptionID string) BaseClient { + return NewWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewWithBaseURI creates an instance of the BaseClient client. +func NewWithBaseURI(baseURI string, subscriptionID string) BaseClient { + return BaseClient{ + Client: autorest.NewClientWithUserAgent(UserAgent()), + BaseURI: baseURI, + SubscriptionID: subscriptionID, + } +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/functions.go b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/functions.go new file mode 100644 index 000000000000..0160868d9956 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/functions.go @@ -0,0 +1,689 @@ +package streamanalytics + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/tracing" + "net/http" +) + +// FunctionsClient is the stream Analytics Client +type FunctionsClient struct { + BaseClient +} + +// NewFunctionsClient creates an instance of the FunctionsClient client. +func NewFunctionsClient(subscriptionID string) FunctionsClient { + return NewFunctionsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewFunctionsClientWithBaseURI creates an instance of the FunctionsClient client. +func NewFunctionsClientWithBaseURI(baseURI string, subscriptionID string) FunctionsClient { + return FunctionsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// CreateOrReplace creates a function or replaces an already existing function under an existing streaming job. +// Parameters: +// function - the definition of the function that will be used to create a new function or replace the existing +// one under the streaming job. +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. +// jobName - the name of the streaming job. +// functionName - the name of the function. +// ifMatch - the ETag of the function. Omit this value to always overwrite the current function. Specify the +// last-seen ETag value to prevent accidentally overwriting concurrent changes. +// ifNoneMatch - set to '*' to allow a new function to be created, but to prevent updating an existing +// function. Other values will result in a 412 Pre-condition Failed response. 
+func (client FunctionsClient) CreateOrReplace(ctx context.Context, function Function, resourceGroupName string, jobName string, functionName string, ifMatch string, ifNoneMatch string) (result Function, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.CreateOrReplace") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.CreateOrReplacePreparer(ctx, function, resourceGroupName, jobName, functionName, ifMatch, ifNoneMatch) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "CreateOrReplace", nil, "Failure preparing request") + return + } + + resp, err := client.CreateOrReplaceSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "CreateOrReplace", resp, "Failure sending request") + return + } + + result, err = client.CreateOrReplaceResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "CreateOrReplace", resp, "Failure responding to request") + } + + return +} + +// CreateOrReplacePreparer prepares the CreateOrReplace request. 
+func (client FunctionsClient) CreateOrReplacePreparer(ctx context.Context, function Function, resourceGroupName string, jobName string, functionName string, ifMatch string, ifNoneMatch string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "functionName": autorest.Encode("path", functionName), + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}", pathParameters), + autorest.WithJSON(function), + autorest.WithQueryParameters(queryParameters)) + if len(ifMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-Match", autorest.String(ifMatch))) + } + if len(ifNoneMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-None-Match", autorest.String(ifNoneMatch))) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// CreateOrReplaceSender sends the CreateOrReplace request. The method will close the +// http.Response Body if it receives an error. +func (client FunctionsClient) CreateOrReplaceSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// CreateOrReplaceResponder handles the response to the CreateOrReplace request. The method always +// closes the http.Response Body. 
+func (client FunctionsClient) CreateOrReplaceResponder(resp *http.Response) (result Function, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Delete deletes a function from the streaming job. +// Parameters: +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. +// jobName - the name of the streaming job. +// functionName - the name of the function. +func (client FunctionsClient) Delete(ctx context.Context, resourceGroupName string, jobName string, functionName string) (result autorest.Response, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.Delete") + defer func() { + sc := -1 + if result.Response != nil { + sc = result.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.DeletePreparer(ctx, resourceGroupName, jobName, functionName) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Delete", nil, "Failure preparing request") + return + } + + resp, err := client.DeleteSender(req) + if err != nil { + result.Response = resp + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Delete", resp, "Failure sending request") + return + } + + result, err = client.DeleteResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Delete", resp, "Failure responding to request") + } + + return +} + +// DeletePreparer prepares the Delete request. 
+func (client FunctionsClient) DeletePreparer(ctx context.Context, resourceGroupName string, jobName string, functionName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "functionName": autorest.Encode("path", functionName), + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsDelete(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// DeleteSender sends the Delete request. The method will close the +// http.Response Body if it receives an error. +func (client FunctionsClient) DeleteSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// DeleteResponder handles the response to the Delete request. The method always +// closes the http.Response Body. +func (client FunctionsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent), + autorest.ByClosing()) + result.Response = resp + return +} + +// Get gets details about the specified function. +// Parameters: +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. +// jobName - the name of the streaming job. 
+// functionName - the name of the function. +func (client FunctionsClient) Get(ctx context.Context, resourceGroupName string, jobName string, functionName string) (result Function, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.Get") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.GetPreparer(ctx, resourceGroupName, jobName, functionName) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Get", nil, "Failure preparing request") + return + } + + resp, err := client.GetSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Get", resp, "Failure sending request") + return + } + + result, err = client.GetResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Get", resp, "Failure responding to request") + } + + return +} + +// GetPreparer prepares the Get request. 
+func (client FunctionsClient) GetPreparer(ctx context.Context, resourceGroupName string, jobName string, functionName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "functionName": autorest.Encode("path", functionName), + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetSender sends the Get request. The method will close the +// http.Response Body if it receives an error. +func (client FunctionsClient) GetSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// GetResponder handles the response to the Get request. The method always +// closes the http.Response Body. +func (client FunctionsClient) GetResponder(resp *http.Response) (result Function, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// ListByStreamingJob lists all of the functions under the specified streaming job. +// Parameters: +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. 
+// jobName - the name of the streaming job. +// selectParameter - the $select OData query parameter. This is a comma-separated list of structural properties +// to include in the response, or "*" to include all properties. By default, all properties are returned except +// diagnostics. Currently only accepts '*' as a valid value. +func (client FunctionsClient) ListByStreamingJob(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result FunctionListResultPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.ListByStreamingJob") + defer func() { + sc := -1 + if result.flr.Response.Response != nil { + sc = result.flr.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.fn = client.listByStreamingJobNextResults + req, err := client.ListByStreamingJobPreparer(ctx, resourceGroupName, jobName, selectParameter) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "ListByStreamingJob", nil, "Failure preparing request") + return + } + + resp, err := client.ListByStreamingJobSender(req) + if err != nil { + result.flr.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "ListByStreamingJob", resp, "Failure sending request") + return + } + + result.flr, err = client.ListByStreamingJobResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "ListByStreamingJob", resp, "Failure responding to request") + } + + return +} + +// ListByStreamingJobPreparer prepares the ListByStreamingJob request. 
+func (client FunctionsClient) ListByStreamingJobPreparer(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(selectParameter) > 0 { + queryParameters["$select"] = autorest.Encode("query", selectParameter) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListByStreamingJobSender sends the ListByStreamingJob request. The method will close the +// http.Response Body if it receives an error. +func (client FunctionsClient) ListByStreamingJobSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// ListByStreamingJobResponder handles the response to the ListByStreamingJob request. The method always +// closes the http.Response Body. +func (client FunctionsClient) ListByStreamingJobResponder(resp *http.Response) (result FunctionListResult, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listByStreamingJobNextResults retrieves the next set of results, if any. 
+func (client FunctionsClient) listByStreamingJobNextResults(ctx context.Context, lastResults FunctionListResult) (result FunctionListResult, err error) { + req, err := lastResults.functionListResultPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "listByStreamingJobNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListByStreamingJobSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "listByStreamingJobNextResults", resp, "Failure sending next results request") + } + result, err = client.ListByStreamingJobResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "listByStreamingJobNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListByStreamingJobComplete enumerates all values, automatically crossing page boundaries as required. +func (client FunctionsClient) ListByStreamingJobComplete(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result FunctionListResultIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.ListByStreamingJob") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListByStreamingJob(ctx, resourceGroupName, jobName, selectParameter) + return +} + +// RetrieveDefaultDefinition retrieves the default definition of a function based on the parameters specified. +// Parameters: +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. 
+// jobName - the name of the streaming job. +// functionName - the name of the function. +// functionRetrieveDefaultDefinitionParameters - parameters used to specify the type of function to retrieve +// the default definition for. +func (client FunctionsClient) RetrieveDefaultDefinition(ctx context.Context, resourceGroupName string, jobName string, functionName string, functionRetrieveDefaultDefinitionParameters *BasicFunctionRetrieveDefaultDefinitionParameters) (result Function, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.RetrieveDefaultDefinition") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.RetrieveDefaultDefinitionPreparer(ctx, resourceGroupName, jobName, functionName, functionRetrieveDefaultDefinitionParameters) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "RetrieveDefaultDefinition", nil, "Failure preparing request") + return + } + + resp, err := client.RetrieveDefaultDefinitionSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "RetrieveDefaultDefinition", resp, "Failure sending request") + return + } + + result, err = client.RetrieveDefaultDefinitionResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "RetrieveDefaultDefinition", resp, "Failure responding to request") + } + + return +} + +// RetrieveDefaultDefinitionPreparer prepares the RetrieveDefaultDefinition request. 
+func (client FunctionsClient) RetrieveDefaultDefinitionPreparer(ctx context.Context, resourceGroupName string, jobName string, functionName string, functionRetrieveDefaultDefinitionParameters *BasicFunctionRetrieveDefaultDefinitionParameters) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "functionName": autorest.Encode("path", functionName), + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/RetrieveDefaultDefinition", pathParameters), + autorest.WithQueryParameters(queryParameters)) + if functionRetrieveDefaultDefinitionParameters != nil { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithJSON(functionRetrieveDefaultDefinitionParameters)) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// RetrieveDefaultDefinitionSender sends the RetrieveDefaultDefinition request. The method will close the +// http.Response Body if it receives an error. +func (client FunctionsClient) RetrieveDefaultDefinitionSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// RetrieveDefaultDefinitionResponder handles the response to the RetrieveDefaultDefinition request. The method always +// closes the http.Response Body. 
+func (client FunctionsClient) RetrieveDefaultDefinitionResponder(resp *http.Response) (result Function, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Test tests if the information provided for a function is valid. This can range from testing the connection to the +// underlying web service behind the function or making sure the function code provided is syntactically correct. +// Parameters: +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. +// jobName - the name of the streaming job. +// functionName - the name of the function. +// function - if the function specified does not already exist, this parameter must contain the full function +// definition intended to be tested. If the function specified already exists, this parameter can be left null +// to test the existing function as is or if specified, the properties specified will overwrite the +// corresponding properties in the existing function (exactly like a PATCH operation) and the resulting +// function will be tested. 
+func (client FunctionsClient) Test(ctx context.Context, resourceGroupName string, jobName string, functionName string, function *Function) (result FunctionsTestFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.Test") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.TestPreparer(ctx, resourceGroupName, jobName, functionName, function) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Test", nil, "Failure preparing request") + return + } + + result, err = client.TestSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Test", result.Response(), "Failure sending request") + return + } + + return +} + +// TestPreparer prepares the Test request. +func (client FunctionsClient) TestPreparer(ctx context.Context, resourceGroupName string, jobName string, functionName string, function *Function) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "functionName": autorest.Encode("path", functionName), + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test", pathParameters), + autorest.WithQueryParameters(queryParameters)) + if function != nil { + preparer = autorest.DecoratePreparer(preparer, + 
autorest.WithJSON(function)) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// TestSender sends the Test request. The method will close the +// http.Response Body if it receives an error. +func (client FunctionsClient) TestSender(req *http.Request) (future FunctionsTestFuture, err error) { + var resp *http.Response + resp, err = autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// TestResponder handles the response to the Test request. The method always +// closes the http.Response Body. +func (client FunctionsClient) TestResponder(resp *http.Response) (result ResourceTestStatus, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Update updates an existing function under an existing streaming job. This can be used to partially update (ie. +// update one or two properties) a function without affecting the rest the job or function definition. +// Parameters: +// function - a function object. The properties specified here will overwrite the corresponding properties in +// the existing function (ie. Those properties will be updated). Any properties that are set to null here will +// mean that the corresponding property in the existing function will remain the same and not change as a +// result of this PATCH operation. +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. +// jobName - the name of the streaming job. +// functionName - the name of the function. +// ifMatch - the ETag of the function. Omit this value to always overwrite the current function. 
Specify the +// last-seen ETag value to prevent accidentally overwriting concurrent changes. +func (client FunctionsClient) Update(ctx context.Context, function Function, resourceGroupName string, jobName string, functionName string, ifMatch string) (result Function, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.Update") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.UpdatePreparer(ctx, function, resourceGroupName, jobName, functionName, ifMatch) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Update", nil, "Failure preparing request") + return + } + + resp, err := client.UpdateSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Update", resp, "Failure sending request") + return + } + + result, err = client.UpdateResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Update", resp, "Failure responding to request") + } + + return +} + +// UpdatePreparer prepares the Update request. 
+func (client FunctionsClient) UpdatePreparer(ctx context.Context, function Function, resourceGroupName string, jobName string, functionName string, ifMatch string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "functionName": autorest.Encode("path", functionName), + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPatch(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}", pathParameters), + autorest.WithJSON(function), + autorest.WithQueryParameters(queryParameters)) + if len(ifMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-Match", autorest.String(ifMatch))) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// UpdateSender sends the Update request. The method will close the +// http.Response Body if it receives an error. +func (client FunctionsClient) UpdateSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// UpdateResponder handles the response to the Update request. The method always +// closes the http.Response Body. 
+func (client FunctionsClient) UpdateResponder(resp *http.Response) (result Function, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/inputs.go b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/inputs.go new file mode 100644 index 000000000000..f5c66bc99731 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/inputs.go @@ -0,0 +1,600 @@ +package streamanalytics + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/tracing" + "net/http" +) + +// InputsClient is the stream Analytics Client +type InputsClient struct { + BaseClient +} + +// NewInputsClient creates an instance of the InputsClient client. 
+func NewInputsClient(subscriptionID string) InputsClient { + return NewInputsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewInputsClientWithBaseURI creates an instance of the InputsClient client. +func NewInputsClientWithBaseURI(baseURI string, subscriptionID string) InputsClient { + return InputsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// CreateOrReplace creates an input or replaces an already existing input under an existing streaming job. +// Parameters: +// input - the definition of the input that will be used to create a new input or replace the existing one +// under the streaming job. +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. +// jobName - the name of the streaming job. +// inputName - the name of the input. +// ifMatch - the ETag of the input. Omit this value to always overwrite the current input. Specify the +// last-seen ETag value to prevent accidentally overwriting concurrent changes. +// ifNoneMatch - set to '*' to allow a new input to be created, but to prevent updating an existing input. +// Other values will result in a 412 Pre-condition Failed response. 
+func (client InputsClient) CreateOrReplace(ctx context.Context, input Input, resourceGroupName string, jobName string, inputName string, ifMatch string, ifNoneMatch string) (result Input, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/InputsClient.CreateOrReplace") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.CreateOrReplacePreparer(ctx, input, resourceGroupName, jobName, inputName, ifMatch, ifNoneMatch) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "CreateOrReplace", nil, "Failure preparing request") + return + } + + resp, err := client.CreateOrReplaceSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "CreateOrReplace", resp, "Failure sending request") + return + } + + result, err = client.CreateOrReplaceResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "CreateOrReplace", resp, "Failure responding to request") + } + + return +} + +// CreateOrReplacePreparer prepares the CreateOrReplace request. 
+func (client InputsClient) CreateOrReplacePreparer(ctx context.Context, input Input, resourceGroupName string, jobName string, inputName string, ifMatch string, ifNoneMatch string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "inputName": autorest.Encode("path", inputName), + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}", pathParameters), + autorest.WithJSON(input), + autorest.WithQueryParameters(queryParameters)) + if len(ifMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-Match", autorest.String(ifMatch))) + } + if len(ifNoneMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-None-Match", autorest.String(ifNoneMatch))) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// CreateOrReplaceSender sends the CreateOrReplace request. The method will close the +// http.Response Body if it receives an error. +func (client InputsClient) CreateOrReplaceSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// CreateOrReplaceResponder handles the response to the CreateOrReplace request. The method always +// closes the http.Response Body. 
+func (client InputsClient) CreateOrReplaceResponder(resp *http.Response) (result Input, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Delete deletes an input from the streaming job. +// Parameters: +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. +// jobName - the name of the streaming job. +// inputName - the name of the input. +func (client InputsClient) Delete(ctx context.Context, resourceGroupName string, jobName string, inputName string) (result autorest.Response, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/InputsClient.Delete") + defer func() { + sc := -1 + if result.Response != nil { + sc = result.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.DeletePreparer(ctx, resourceGroupName, jobName, inputName) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Delete", nil, "Failure preparing request") + return + } + + resp, err := client.DeleteSender(req) + if err != nil { + result.Response = resp + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Delete", resp, "Failure sending request") + return + } + + result, err = client.DeleteResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Delete", resp, "Failure responding to request") + } + + return +} + +// DeletePreparer prepares the Delete request. 
+func (client InputsClient) DeletePreparer(ctx context.Context, resourceGroupName string, jobName string, inputName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "inputName": autorest.Encode("path", inputName), + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsDelete(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// DeleteSender sends the Delete request. The method will close the +// http.Response Body if it receives an error. +func (client InputsClient) DeleteSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// DeleteResponder handles the response to the Delete request. The method always +// closes the http.Response Body. +func (client InputsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent), + autorest.ByClosing()) + result.Response = resp + return +} + +// Get gets details about the specified input. +// Parameters: +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. +// jobName - the name of the streaming job. +// inputName - the name of the input. 
+func (client InputsClient) Get(ctx context.Context, resourceGroupName string, jobName string, inputName string) (result Input, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/InputsClient.Get") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.GetPreparer(ctx, resourceGroupName, jobName, inputName) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Get", nil, "Failure preparing request") + return + } + + resp, err := client.GetSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Get", resp, "Failure sending request") + return + } + + result, err = client.GetResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Get", resp, "Failure responding to request") + } + + return +} + +// GetPreparer prepares the Get request. 
+func (client InputsClient) GetPreparer(ctx context.Context, resourceGroupName string, jobName string, inputName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "inputName": autorest.Encode("path", inputName), + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetSender sends the Get request. The method will close the +// http.Response Body if it receives an error. +func (client InputsClient) GetSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// GetResponder handles the response to the Get request. The method always +// closes the http.Response Body. +func (client InputsClient) GetResponder(resp *http.Response) (result Input, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// ListByStreamingJob lists all of the inputs under the specified streaming job. +// Parameters: +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. +// jobName - the name of the streaming job. 
+// selectParameter - the $select OData query parameter. This is a comma-separated list of structural properties +// to include in the response, or "*" to include all properties. By default, all properties are returned except +// diagnostics. Currently only accepts '*' as a valid value. +func (client InputsClient) ListByStreamingJob(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result InputListResultPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/InputsClient.ListByStreamingJob") + defer func() { + sc := -1 + if result.ilr.Response.Response != nil { + sc = result.ilr.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.fn = client.listByStreamingJobNextResults + req, err := client.ListByStreamingJobPreparer(ctx, resourceGroupName, jobName, selectParameter) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "ListByStreamingJob", nil, "Failure preparing request") + return + } + + resp, err := client.ListByStreamingJobSender(req) + if err != nil { + result.ilr.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "ListByStreamingJob", resp, "Failure sending request") + return + } + + result.ilr, err = client.ListByStreamingJobResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "ListByStreamingJob", resp, "Failure responding to request") + } + + return +} + +// ListByStreamingJobPreparer prepares the ListByStreamingJob request. 
+func (client InputsClient) ListByStreamingJobPreparer(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(selectParameter) > 0 { + queryParameters["$select"] = autorest.Encode("query", selectParameter) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListByStreamingJobSender sends the ListByStreamingJob request. The method will close the +// http.Response Body if it receives an error. +func (client InputsClient) ListByStreamingJobSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// ListByStreamingJobResponder handles the response to the ListByStreamingJob request. The method always +// closes the http.Response Body. +func (client InputsClient) ListByStreamingJobResponder(resp *http.Response) (result InputListResult, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listByStreamingJobNextResults retrieves the next set of results, if any. 
+func (client InputsClient) listByStreamingJobNextResults(ctx context.Context, lastResults InputListResult) (result InputListResult, err error) { + req, err := lastResults.inputListResultPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "listByStreamingJobNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListByStreamingJobSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "listByStreamingJobNextResults", resp, "Failure sending next results request") + } + result, err = client.ListByStreamingJobResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "listByStreamingJobNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListByStreamingJobComplete enumerates all values, automatically crossing page boundaries as required. +func (client InputsClient) ListByStreamingJobComplete(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result InputListResultIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/InputsClient.ListByStreamingJob") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListByStreamingJob(ctx, resourceGroupName, jobName, selectParameter) + return +} + +// Test tests whether an input’s datasource is reachable and usable by the Azure Stream Analytics service. +// Parameters: +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. +// jobName - the name of the streaming job. 
+// inputName - the name of the input. +// input - if the input specified does not already exist, this parameter must contain the full input definition +// intended to be tested. If the input specified already exists, this parameter can be left null to test the +// existing input as is or if specified, the properties specified will overwrite the corresponding properties +// in the existing input (exactly like a PATCH operation) and the resulting input will be tested. +func (client InputsClient) Test(ctx context.Context, resourceGroupName string, jobName string, inputName string, input *Input) (result InputsTestFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/InputsClient.Test") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.TestPreparer(ctx, resourceGroupName, jobName, inputName, input) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Test", nil, "Failure preparing request") + return + } + + result, err = client.TestSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Test", result.Response(), "Failure sending request") + return + } + + return +} + +// TestPreparer prepares the Test request. 
+func (client InputsClient) TestPreparer(ctx context.Context, resourceGroupName string, jobName string, inputName string, input *Input) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "inputName": autorest.Encode("path", inputName), + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test", pathParameters), + autorest.WithQueryParameters(queryParameters)) + if input != nil { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithJSON(input)) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// TestSender sends the Test request. The method will close the +// http.Response Body if it receives an error. +func (client InputsClient) TestSender(req *http.Request) (future InputsTestFuture, err error) { + var resp *http.Response + resp, err = autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// TestResponder handles the response to the Test request. The method always +// closes the http.Response Body. 
+func (client InputsClient) TestResponder(resp *http.Response) (result ResourceTestStatus, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Update updates an existing input under an existing streaming job. This can be used to partially update (ie. update +// one or two properties) an input without affecting the rest the job or input definition. +// Parameters: +// input - an Input object. The properties specified here will overwrite the corresponding properties in the +// existing input (ie. Those properties will be updated). Any properties that are set to null here will mean +// that the corresponding property in the existing input will remain the same and not change as a result of +// this PATCH operation. +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. +// jobName - the name of the streaming job. +// inputName - the name of the input. +// ifMatch - the ETag of the input. Omit this value to always overwrite the current input. Specify the +// last-seen ETag value to prevent accidentally overwriting concurrent changes. 
+func (client InputsClient) Update(ctx context.Context, input Input, resourceGroupName string, jobName string, inputName string, ifMatch string) (result Input, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/InputsClient.Update") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.UpdatePreparer(ctx, input, resourceGroupName, jobName, inputName, ifMatch) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Update", nil, "Failure preparing request") + return + } + + resp, err := client.UpdateSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Update", resp, "Failure sending request") + return + } + + result, err = client.UpdateResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Update", resp, "Failure responding to request") + } + + return +} + +// UpdatePreparer prepares the Update request. 
+func (client InputsClient) UpdatePreparer(ctx context.Context, input Input, resourceGroupName string, jobName string, inputName string, ifMatch string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "inputName": autorest.Encode("path", inputName), + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPatch(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}", pathParameters), + autorest.WithJSON(input), + autorest.WithQueryParameters(queryParameters)) + if len(ifMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-Match", autorest.String(ifMatch))) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// UpdateSender sends the Update request. The method will close the +// http.Response Body if it receives an error. +func (client InputsClient) UpdateSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// UpdateResponder handles the response to the Update request. The method always +// closes the http.Response Body. 
func (client InputsClient) UpdateResponder(resp *http.Response) (result Input, err error) {
	// Accept only 200 OK, unmarshal the JSON body into the Input result,
	// and always close the response body.
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}
diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/models.go b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/models.go
new file mode 100644
index 000000000000..ef8ab4736aa9
--- /dev/null
+++ b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/models.go
@@ -0,0 +1,5277 @@
package streamanalytics

// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.

import (
	"context"
	"encoding/json"
	"github.com/Azure/go-autorest/autorest"
	"github.com/Azure/go-autorest/autorest/azure"
	"github.com/Azure/go-autorest/autorest/date"
	"github.com/Azure/go-autorest/autorest/to"
	"github.com/Azure/go-autorest/tracing"
	"net/http"
)

// The package's fully qualified name.
const fqdn = "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics"

// BindingType enumerates the values for binding type.
type BindingType string

const (
	// BindingTypeFunctionRetrieveDefaultDefinitionParameters is the base (polymorphic discriminator) value.
	BindingTypeFunctionRetrieveDefaultDefinitionParameters BindingType = "FunctionRetrieveDefaultDefinitionParameters"
	// BindingTypeMicrosoftMachineLearningWebService identifies an Azure Machine Learning web service binding.
	BindingTypeMicrosoftMachineLearningWebService BindingType = "Microsoft.MachineLearning/WebService"
	// BindingTypeMicrosoftStreamAnalyticsJavascriptUdf identifies a JavaScript UDF binding.
	BindingTypeMicrosoftStreamAnalyticsJavascriptUdf BindingType = "Microsoft.StreamAnalytics/JavascriptUdf"
)

// PossibleBindingTypeValues returns an array of possible values for the BindingType const type.
func PossibleBindingTypeValues() []BindingType {
	return []BindingType{BindingTypeFunctionRetrieveDefaultDefinitionParameters, BindingTypeMicrosoftMachineLearningWebService, BindingTypeMicrosoftStreamAnalyticsJavascriptUdf}
}

// CompatibilityLevel enumerates the values for compatibility level.
type CompatibilityLevel string

const (
	// OneFullStopZero is compatibility level "1.0", the only level defined in this API version.
	OneFullStopZero CompatibilityLevel = "1.0"
)

// PossibleCompatibilityLevelValues returns an array of possible values for the CompatibilityLevel const type.
func PossibleCompatibilityLevelValues() []CompatibilityLevel {
	return []CompatibilityLevel{OneFullStopZero}
}

// Encoding enumerates the values for encoding.
type Encoding string

const (
	// UTF8 is the UTF-8 character encoding, the only encoding defined in this API version.
	UTF8 Encoding = "UTF8"
)

// PossibleEncodingValues returns an array of possible values for the Encoding const type.
func PossibleEncodingValues() []Encoding {
	return []Encoding{UTF8}
}

// EventsOutOfOrderPolicy enumerates the values for events out of order policy.
type EventsOutOfOrderPolicy string

const (
	// Adjust is the "Adjust" out-of-order policy (events are adjusted rather than dropped).
	Adjust EventsOutOfOrderPolicy = "Adjust"
	// Drop is the "Drop" out-of-order policy (out-of-order events are dropped).
	Drop EventsOutOfOrderPolicy = "Drop"
)

// PossibleEventsOutOfOrderPolicyValues returns an array of possible values for the EventsOutOfOrderPolicy const type.
func PossibleEventsOutOfOrderPolicyValues() []EventsOutOfOrderPolicy {
	return []EventsOutOfOrderPolicy{Adjust, Drop}
}

// JSONOutputSerializationFormat enumerates the values for json output serialization format.
type JSONOutputSerializationFormat string

const (
	// Array formats JSON output as a single array of objects.
	Array JSONOutputSerializationFormat = "Array"
	// LineSeparated formats JSON output as newline-separated objects.
	LineSeparated JSONOutputSerializationFormat = "LineSeparated"
)

// PossibleJSONOutputSerializationFormatValues returns an array of possible values for the JSONOutputSerializationFormat const type.
func PossibleJSONOutputSerializationFormatValues() []JSONOutputSerializationFormat {
	return []JSONOutputSerializationFormat{Array, LineSeparated}
}

// OutputErrorPolicy enumerates the values for output error policy.
type OutputErrorPolicy string

const (
	// OutputErrorPolicyDrop is the "Drop" output error policy.
	OutputErrorPolicyDrop OutputErrorPolicy = "Drop"
	// OutputErrorPolicyStop is the "Stop" output error policy.
	OutputErrorPolicyStop OutputErrorPolicy = "Stop"
)

// PossibleOutputErrorPolicyValues returns an array of possible values for the OutputErrorPolicy const type.
func PossibleOutputErrorPolicyValues() []OutputErrorPolicy {
	return []OutputErrorPolicy{OutputErrorPolicyDrop, OutputErrorPolicyStop}
}

// OutputStartMode enumerates the values for output start mode.
type OutputStartMode string

const (
	// CustomTime starts output at a caller-specified time.
	CustomTime OutputStartMode = "CustomTime"
	// JobStartTime starts output when the job starts.
	JobStartTime OutputStartMode = "JobStartTime"
	// LastOutputEventTime resumes output from the last output event time.
	LastOutputEventTime OutputStartMode = "LastOutputEventTime"
)

// PossibleOutputStartModeValues returns an array of possible values for the OutputStartMode const type.
func PossibleOutputStartModeValues() []OutputStartMode {
	return []OutputStartMode{CustomTime, JobStartTime, LastOutputEventTime}
}

// SkuName enumerates the values for sku name.
type SkuName string

const (
	// Standard is the "Standard" SKU, the only SKU defined in this API version.
	Standard SkuName = "Standard"
)

// PossibleSkuNameValues returns an array of possible values for the SkuName const type.
func PossibleSkuNameValues() []SkuName {
	return []SkuName{Standard}
}

// Type enumerates the values for type (the serialization-format discriminator;
// see the Serialization model family, e.g. AvroSerialization.MarshalJSON).
type Type string

const (
	// TypeAvro identifies Avro serialization.
	TypeAvro Type = "Avro"
	// TypeCsv identifies CSV serialization.
	TypeCsv Type = "Csv"
	// TypeJSON identifies JSON serialization (wire value "Json").
	TypeJSON Type = "Json"
	// TypeSerialization is the base (polymorphic discriminator) value.
	TypeSerialization Type = "Serialization"
)

// PossibleTypeValues returns an array of possible values for the Type const type.
func PossibleTypeValues() []Type {
	return []Type{TypeAvro, TypeCsv, TypeJSON, TypeSerialization}
}

// TypeBasicFunctionBinding enumerates the values for type basic function binding.
type TypeBasicFunctionBinding string

const (
	// TypeFunctionBinding is the base (polymorphic discriminator) value.
	TypeFunctionBinding TypeBasicFunctionBinding = "FunctionBinding"
	// TypeMicrosoftMachineLearningWebService identifies an Azure Machine Learning web service function binding.
	TypeMicrosoftMachineLearningWebService TypeBasicFunctionBinding = "Microsoft.MachineLearning/WebService"
	// TypeMicrosoftStreamAnalyticsJavascriptUdf identifies a JavaScript UDF function binding.
	TypeMicrosoftStreamAnalyticsJavascriptUdf TypeBasicFunctionBinding = "Microsoft.StreamAnalytics/JavascriptUdf"
)

// PossibleTypeBasicFunctionBindingValues returns an array of possible values for the TypeBasicFunctionBinding const type.
func PossibleTypeBasicFunctionBindingValues() []TypeBasicFunctionBinding {
	return []TypeBasicFunctionBinding{TypeFunctionBinding, TypeMicrosoftMachineLearningWebService, TypeMicrosoftStreamAnalyticsJavascriptUdf}
}

// TypeBasicFunctionProperties enumerates the values for type basic function properties.
type TypeBasicFunctionProperties string

const (
	// TypeFunctionProperties is the base (polymorphic discriminator) value.
	TypeFunctionProperties TypeBasicFunctionProperties = "FunctionProperties"
	// TypeScalar identifies scalar function properties.
	TypeScalar TypeBasicFunctionProperties = "Scalar"
)

// PossibleTypeBasicFunctionPropertiesValues returns an array of possible values for the TypeBasicFunctionProperties const type.
func PossibleTypeBasicFunctionPropertiesValues() []TypeBasicFunctionProperties {
	return []TypeBasicFunctionProperties{TypeFunctionProperties, TypeScalar}
}

// TypeBasicInputProperties enumerates the values for type basic input properties.
type TypeBasicInputProperties string

const (
	// TypeInputProperties is the base (polymorphic discriminator) value.
	TypeInputProperties TypeBasicInputProperties = "InputProperties"
	// TypeReference identifies reference input properties.
	TypeReference TypeBasicInputProperties = "Reference"
	// TypeStream identifies stream input properties.
	TypeStream TypeBasicInputProperties = "Stream"
)

// PossibleTypeBasicInputPropertiesValues returns an array of possible values for the TypeBasicInputProperties const type.
func PossibleTypeBasicInputPropertiesValues() []TypeBasicInputProperties {
	return []TypeBasicInputProperties{TypeInputProperties, TypeReference, TypeStream}
}

// TypeBasicOutputDataSource enumerates the values for type basic output data source.
type TypeBasicOutputDataSource string

const (
	// TypeMicrosoftDataLakeAccounts identifies an Azure Data Lake Store output.
	TypeMicrosoftDataLakeAccounts TypeBasicOutputDataSource = "Microsoft.DataLake/Accounts"
	// TypeMicrosoftServiceBusEventHub identifies an Event Hub output.
	TypeMicrosoftServiceBusEventHub TypeBasicOutputDataSource = "Microsoft.ServiceBus/EventHub"
	// TypeMicrosoftServiceBusQueue identifies a Service Bus queue output.
	TypeMicrosoftServiceBusQueue TypeBasicOutputDataSource = "Microsoft.ServiceBus/Queue"
	// TypeMicrosoftServiceBusTopic identifies a Service Bus topic output.
	TypeMicrosoftServiceBusTopic TypeBasicOutputDataSource = "Microsoft.ServiceBus/Topic"
	// TypeMicrosoftSQLServerDatabase identifies an Azure SQL database output.
	TypeMicrosoftSQLServerDatabase TypeBasicOutputDataSource = "Microsoft.Sql/Server/Database"
	// TypeMicrosoftStorageBlob identifies a blob storage output.
	TypeMicrosoftStorageBlob TypeBasicOutputDataSource = "Microsoft.Storage/Blob"
	// TypeMicrosoftStorageDocumentDB identifies a DocumentDB output.
	TypeMicrosoftStorageDocumentDB TypeBasicOutputDataSource = "Microsoft.Storage/DocumentDB"
	// TypeMicrosoftStorageTable identifies an Azure Table storage output.
	TypeMicrosoftStorageTable TypeBasicOutputDataSource = "Microsoft.Storage/Table"
	// TypeOutputDataSource is the base (polymorphic discriminator) value.
	TypeOutputDataSource TypeBasicOutputDataSource = "OutputDataSource"
	// TypePowerBI identifies a Power BI output.
	TypePowerBI TypeBasicOutputDataSource = "PowerBI"
)

// PossibleTypeBasicOutputDataSourceValues returns an array of possible values for the TypeBasicOutputDataSource const type.
func PossibleTypeBasicOutputDataSourceValues() []TypeBasicOutputDataSource {
	return []TypeBasicOutputDataSource{TypeMicrosoftDataLakeAccounts, TypeMicrosoftServiceBusEventHub, TypeMicrosoftServiceBusQueue, TypeMicrosoftServiceBusTopic, TypeMicrosoftSQLServerDatabase, TypeMicrosoftStorageBlob, TypeMicrosoftStorageDocumentDB, TypeMicrosoftStorageTable, TypeOutputDataSource, TypePowerBI}
}

// TypeBasicReferenceInputDataSource enumerates the values for type basic reference input data source.
type TypeBasicReferenceInputDataSource string

const (
	// TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob identifies a blob reference input.
	TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob TypeBasicReferenceInputDataSource = "Microsoft.Storage/Blob"
	// TypeBasicReferenceInputDataSourceTypeReferenceInputDataSource is the base (polymorphic discriminator) value.
	TypeBasicReferenceInputDataSourceTypeReferenceInputDataSource TypeBasicReferenceInputDataSource = "ReferenceInputDataSource"
)

// PossibleTypeBasicReferenceInputDataSourceValues returns an array of possible values for the TypeBasicReferenceInputDataSource const type.
func PossibleTypeBasicReferenceInputDataSourceValues() []TypeBasicReferenceInputDataSource {
	return []TypeBasicReferenceInputDataSource{TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob, TypeBasicReferenceInputDataSourceTypeReferenceInputDataSource}
}

// TypeBasicStreamInputDataSource enumerates the values for type basic stream input data source.
type TypeBasicStreamInputDataSource string

const (
	// TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs identifies an IoT Hub stream input.
	TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs TypeBasicStreamInputDataSource = "Microsoft.Devices/IotHubs"
	// TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub identifies an Event Hub stream input.
	TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub TypeBasicStreamInputDataSource = "Microsoft.ServiceBus/EventHub"
	// TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob identifies a blob stream input.
	TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob TypeBasicStreamInputDataSource = "Microsoft.Storage/Blob"
	// TypeBasicStreamInputDataSourceTypeStreamInputDataSource is the base (polymorphic discriminator) value.
	TypeBasicStreamInputDataSourceTypeStreamInputDataSource TypeBasicStreamInputDataSource = "StreamInputDataSource"
)

// PossibleTypeBasicStreamInputDataSourceValues returns an array of possible values for the TypeBasicStreamInputDataSource const type.
func PossibleTypeBasicStreamInputDataSourceValues() []TypeBasicStreamInputDataSource {
	return []TypeBasicStreamInputDataSource{TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs, TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub, TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob, TypeBasicStreamInputDataSourceTypeStreamInputDataSource}
}

// UdfType enumerates the values for udf type.
type UdfType string

const (
	// Scalar identifies a scalar user-defined function, the only UDF type defined in this API version.
	Scalar UdfType = "Scalar"
)

// PossibleUdfTypeValues returns an array of possible values for the UdfType const type.
func PossibleUdfTypeValues() []UdfType {
	return []UdfType{Scalar}
}

// AvroSerialization describes how data from an input is serialized or how data is serialized when written
// to an output in Avro format.
type AvroSerialization struct {
	// Properties - The properties that are associated with the Avro serialization type. Required on PUT (CreateOrReplace) requests.
	Properties interface{} `json:"properties,omitempty"`
	// Type - Possible values include: 'TypeSerialization', 'TypeAvro', 'TypeJSON', 'TypeCsv'
	Type Type `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for AvroSerialization.
// It forces the polymorphic discriminator Type to TypeAvro before encoding,
// so a caller can never emit an AvroSerialization with a mismatched type tag.
func (as AvroSerialization) MarshalJSON() ([]byte, error) {
	as.Type = TypeAvro
	objectMap := make(map[string]interface{})
	if as.Properties != nil {
		objectMap["properties"] = as.Properties
	}
	if as.Type != "" {
		objectMap["type"] = as.Type
	}
	return json.Marshal(objectMap)
}

// The As* methods below implement the BasicSerialization discriminated union:
// exactly the matching concrete type (and the Basic interface) return a
// non-nil value with true; all other conversions return nil, false.

// AsAvroSerialization is the BasicSerialization implementation for AvroSerialization.
func (as AvroSerialization) AsAvroSerialization() (*AvroSerialization, bool) {
	return &as, true
}

// AsJSONSerialization is the BasicSerialization implementation for AvroSerialization.
func (as AvroSerialization) AsJSONSerialization() (*JSONSerialization, bool) {
	return nil, false
}

// AsCsvSerialization is the BasicSerialization implementation for AvroSerialization.
func (as AvroSerialization) AsCsvSerialization() (*CsvSerialization, bool) {
	return nil, false
}

// AsSerialization is the BasicSerialization implementation for AvroSerialization.
func (as AvroSerialization) AsSerialization() (*Serialization, bool) {
	return nil, false
}

// AsBasicSerialization is the BasicSerialization implementation for AvroSerialization.
func (as AvroSerialization) AsBasicSerialization() (BasicSerialization, bool) {
	return &as, true
}

// AzureDataLakeStoreOutputDataSource describes an Azure Data Lake Store output data source.
+type AzureDataLakeStoreOutputDataSource struct { + // AzureDataLakeStoreOutputDataSourceProperties - The properties that are associated with an Azure Data Lake Store output. Required on PUT (CreateOrReplace) requests. + *AzureDataLakeStoreOutputDataSourceProperties `json:"properties,omitempty"` + // Type - Possible values include: 'TypeOutputDataSource', 'TypeMicrosoftDataLakeAccounts', 'TypePowerBI', 'TypeMicrosoftServiceBusTopic', 'TypeMicrosoftServiceBusQueue', 'TypeMicrosoftStorageDocumentDB', 'TypeMicrosoftSQLServerDatabase', 'TypeMicrosoftServiceBusEventHub', 'TypeMicrosoftStorageTable', 'TypeMicrosoftStorageBlob' + Type TypeBasicOutputDataSource `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for AzureDataLakeStoreOutputDataSource. +func (adlsods AzureDataLakeStoreOutputDataSource) MarshalJSON() ([]byte, error) { + adlsods.Type = TypeMicrosoftDataLakeAccounts + objectMap := make(map[string]interface{}) + if adlsods.AzureDataLakeStoreOutputDataSourceProperties != nil { + objectMap["properties"] = adlsods.AzureDataLakeStoreOutputDataSourceProperties + } + if adlsods.Type != "" { + objectMap["type"] = adlsods.Type + } + return json.Marshal(objectMap) +} + +// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. +func (adlsods AzureDataLakeStoreOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { + return &adlsods, true +} + +// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. +func (adlsods AzureDataLakeStoreOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. 
+func (adlsods AzureDataLakeStoreOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. +func (adlsods AzureDataLakeStoreOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { + return nil, false +} + +// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. +func (adlsods AzureDataLakeStoreOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { + return nil, false +} + +// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. +func (adlsods AzureDataLakeStoreOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { + return nil, false +} + +// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. +func (adlsods AzureDataLakeStoreOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { + return nil, false +} + +// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. +func (adlsods AzureDataLakeStoreOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { + return nil, false +} + +// AsBlobOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. +func (adlsods AzureDataLakeStoreOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { + return nil, false +} + +// AsOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. 
+func (adlsods AzureDataLakeStoreOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { + return nil, false +} + +// AsBasicOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. +func (adlsods AzureDataLakeStoreOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { + return &adlsods, true +} + +// UnmarshalJSON is the custom unmarshaler for AzureDataLakeStoreOutputDataSource struct. +func (adlsods *AzureDataLakeStoreOutputDataSource) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var azureDataLakeStoreOutputDataSourceProperties AzureDataLakeStoreOutputDataSourceProperties + err = json.Unmarshal(*v, &azureDataLakeStoreOutputDataSourceProperties) + if err != nil { + return err + } + adlsods.AzureDataLakeStoreOutputDataSourceProperties = &azureDataLakeStoreOutputDataSourceProperties + } + case "type": + if v != nil { + var typeVar TypeBasicOutputDataSource + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + adlsods.Type = typeVar + } + } + } + + return nil +} + +// AzureDataLakeStoreOutputDataSourceProperties the properties that are associated with an Azure Data Lake +// Store. +type AzureDataLakeStoreOutputDataSourceProperties struct { + // AccountName - The name of the Azure Data Lake Store account. Required on PUT (CreateOrReplace) requests. + AccountName *string `json:"accountName,omitempty"` + // TenantID - The tenant id of the user used to obtain the refresh token. Required on PUT (CreateOrReplace) requests. + TenantID *string `json:"tenantId,omitempty"` + // FilePathPrefix - The location of the file to which the output should be written to. Required on PUT (CreateOrReplace) requests. + FilePathPrefix *string `json:"filePathPrefix,omitempty"` + // DateFormat - The date format. 
Wherever {date} appears in filePathPrefix, the value of this property is used as the date format instead. + DateFormat *string `json:"dateFormat,omitempty"` + // TimeFormat - The time format. Wherever {time} appears in filePathPrefix, the value of this property is used as the time format instead. + TimeFormat *string `json:"timeFormat,omitempty"` + // RefreshToken - A refresh token that can be used to obtain a valid access token that can then be used to authenticate with the data source. A valid refresh token is currently only obtainable via the Azure Portal. It is recommended to put a dummy string value here when creating the data source and then going to the Azure Portal to authenticate the data source which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) requests. + RefreshToken *string `json:"refreshToken,omitempty"` + // TokenUserPrincipalName - The user principal name (UPN) of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token. + TokenUserPrincipalName *string `json:"tokenUserPrincipalName,omitempty"` + // TokenUserDisplayName - The user display name of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token. + TokenUserDisplayName *string `json:"tokenUserDisplayName,omitempty"` +} + +// AzureMachineLearningWebServiceFunctionBinding the binding to an Azure Machine Learning web service. +type AzureMachineLearningWebServiceFunctionBinding struct { + // AzureMachineLearningWebServiceFunctionBindingProperties - The binding properties associated with an Azure Machine learning web service. 
+ *AzureMachineLearningWebServiceFunctionBindingProperties `json:"properties,omitempty"` + // Type - Possible values include: 'TypeFunctionBinding', 'TypeMicrosoftStreamAnalyticsJavascriptUdf', 'TypeMicrosoftMachineLearningWebService' + Type TypeBasicFunctionBinding `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for AzureMachineLearningWebServiceFunctionBinding. +func (amlwsfb AzureMachineLearningWebServiceFunctionBinding) MarshalJSON() ([]byte, error) { + amlwsfb.Type = TypeMicrosoftMachineLearningWebService + objectMap := make(map[string]interface{}) + if amlwsfb.AzureMachineLearningWebServiceFunctionBindingProperties != nil { + objectMap["properties"] = amlwsfb.AzureMachineLearningWebServiceFunctionBindingProperties + } + if amlwsfb.Type != "" { + objectMap["type"] = amlwsfb.Type + } + return json.Marshal(objectMap) +} + +// AsJavaScriptFunctionBinding is the BasicFunctionBinding implementation for AzureMachineLearningWebServiceFunctionBinding. +func (amlwsfb AzureMachineLearningWebServiceFunctionBinding) AsJavaScriptFunctionBinding() (*JavaScriptFunctionBinding, bool) { + return nil, false +} + +// AsAzureMachineLearningWebServiceFunctionBinding is the BasicFunctionBinding implementation for AzureMachineLearningWebServiceFunctionBinding. +func (amlwsfb AzureMachineLearningWebServiceFunctionBinding) AsAzureMachineLearningWebServiceFunctionBinding() (*AzureMachineLearningWebServiceFunctionBinding, bool) { + return &amlwsfb, true +} + +// AsFunctionBinding is the BasicFunctionBinding implementation for AzureMachineLearningWebServiceFunctionBinding. +func (amlwsfb AzureMachineLearningWebServiceFunctionBinding) AsFunctionBinding() (*FunctionBinding, bool) { + return nil, false +} + +// AsBasicFunctionBinding is the BasicFunctionBinding implementation for AzureMachineLearningWebServiceFunctionBinding. 
+func (amlwsfb AzureMachineLearningWebServiceFunctionBinding) AsBasicFunctionBinding() (BasicFunctionBinding, bool) { + return &amlwsfb, true +} + +// UnmarshalJSON is the custom unmarshaler for AzureMachineLearningWebServiceFunctionBinding struct. +func (amlwsfb *AzureMachineLearningWebServiceFunctionBinding) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var azureMachineLearningWebServiceFunctionBindingProperties AzureMachineLearningWebServiceFunctionBindingProperties + err = json.Unmarshal(*v, &azureMachineLearningWebServiceFunctionBindingProperties) + if err != nil { + return err + } + amlwsfb.AzureMachineLearningWebServiceFunctionBindingProperties = &azureMachineLearningWebServiceFunctionBindingProperties + } + case "type": + if v != nil { + var typeVar TypeBasicFunctionBinding + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + amlwsfb.Type = typeVar + } + } + } + + return nil +} + +// AzureMachineLearningWebServiceFunctionBindingProperties the binding properties associated with an Azure +// Machine learning web service. +type AzureMachineLearningWebServiceFunctionBindingProperties struct { + // Endpoint - The Request-Response execute endpoint of the Azure Machine Learning web service. Find out more here: https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs + Endpoint *string `json:"endpoint,omitempty"` + // APIKey - The API key used to authenticate with Request-Response endpoint. + APIKey *string `json:"apiKey,omitempty"` + // Inputs - The inputs for the Azure Machine Learning web service endpoint. + Inputs *AzureMachineLearningWebServiceInputs `json:"inputs,omitempty"` + // Outputs - A list of outputs from the Azure Machine Learning web service endpoint execution. 
+ Outputs *[]AzureMachineLearningWebServiceOutputColumn `json:"outputs,omitempty"` + // BatchSize - Number between 1 and 10000 describing maximum number of rows for every Azure ML RRS execute request. Default is 1000. + BatchSize *int32 `json:"batchSize,omitempty"` +} + +// AzureMachineLearningWebServiceFunctionBindingRetrievalProperties the binding retrieval properties +// associated with an Azure Machine learning web service. +type AzureMachineLearningWebServiceFunctionBindingRetrievalProperties struct { + // ExecuteEndpoint - The Request-Response execute endpoint of the Azure Machine Learning web service. Find out more here: https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs + ExecuteEndpoint *string `json:"executeEndpoint,omitempty"` + // UdfType - The function type. Possible values include: 'Scalar' + UdfType UdfType `json:"udfType,omitempty"` +} + +// AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters the parameters needed to +// retrieve the default function definition for an Azure Machine Learning web service function. +type AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters struct { + // AzureMachineLearningWebServiceFunctionBindingRetrievalProperties - The binding retrieval properties associated with an Azure Machine learning web service. + *AzureMachineLearningWebServiceFunctionBindingRetrievalProperties `json:"bindingRetrievalProperties,omitempty"` + // BindingType - Possible values include: 'BindingTypeFunctionRetrieveDefaultDefinitionParameters', 'BindingTypeMicrosoftMachineLearningWebService', 'BindingTypeMicrosoftStreamAnalyticsJavascriptUdf' + BindingType BindingType `json:"bindingType,omitempty"` +} + +// MarshalJSON is the custom marshaler for AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters. 
+func (amlwsfrddp AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters) MarshalJSON() ([]byte, error) { + amlwsfrddp.BindingType = BindingTypeMicrosoftMachineLearningWebService + objectMap := make(map[string]interface{}) + if amlwsfrddp.AzureMachineLearningWebServiceFunctionBindingRetrievalProperties != nil { + objectMap["bindingRetrievalProperties"] = amlwsfrddp.AzureMachineLearningWebServiceFunctionBindingRetrievalProperties + } + if amlwsfrddp.BindingType != "" { + objectMap["bindingType"] = amlwsfrddp.BindingType + } + return json.Marshal(objectMap) +} + +// AsAzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters. +func (amlwsfrddp AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters) AsAzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters() (*AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters, bool) { + return &amlwsfrddp, true +} + +// AsJavaScriptFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters. +func (amlwsfrddp AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters) AsJavaScriptFunctionRetrieveDefaultDefinitionParameters() (*JavaScriptFunctionRetrieveDefaultDefinitionParameters, bool) { + return nil, false +} + +// AsFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters. 
+func (amlwsfrddp AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters) AsFunctionRetrieveDefaultDefinitionParameters() (*FunctionRetrieveDefaultDefinitionParameters, bool) { + return nil, false +} + +// AsBasicFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters. +func (amlwsfrddp AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters) AsBasicFunctionRetrieveDefaultDefinitionParameters() (BasicFunctionRetrieveDefaultDefinitionParameters, bool) { + return &amlwsfrddp, true +} + +// UnmarshalJSON is the custom unmarshaler for AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters struct. +func (amlwsfrddp *AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "bindingRetrievalProperties": + if v != nil { + var azureMachineLearningWebServiceFunctionBindingRetrievalProperties AzureMachineLearningWebServiceFunctionBindingRetrievalProperties + err = json.Unmarshal(*v, &azureMachineLearningWebServiceFunctionBindingRetrievalProperties) + if err != nil { + return err + } + amlwsfrddp.AzureMachineLearningWebServiceFunctionBindingRetrievalProperties = &azureMachineLearningWebServiceFunctionBindingRetrievalProperties + } + case "bindingType": + if v != nil { + var bindingType BindingType + err = json.Unmarshal(*v, &bindingType) + if err != nil { + return err + } + amlwsfrddp.BindingType = bindingType + } + } + } + + return nil +} + +// AzureMachineLearningWebServiceInputColumn describes an input column for the Azure Machine Learning web +// service endpoint. +type AzureMachineLearningWebServiceInputColumn struct { + // Name - The name of the input column. 
+ Name *string `json:"name,omitempty"` + // DataType - The (Azure Machine Learning supported) data type of the input column. A list of valid Azure Machine Learning data types are described at https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx . + DataType *string `json:"dataType,omitempty"` + // MapTo - The zero based index of the function parameter this input maps to. + MapTo *int32 `json:"mapTo,omitempty"` +} + +// AzureMachineLearningWebServiceInputs the inputs for the Azure Machine Learning web service endpoint. +type AzureMachineLearningWebServiceInputs struct { + // Name - The name of the input. This is the name provided while authoring the endpoint. + Name *string `json:"name,omitempty"` + // ColumnNames - A list of input columns for the Azure Machine Learning web service endpoint. + ColumnNames *[]AzureMachineLearningWebServiceInputColumn `json:"columnNames,omitempty"` +} + +// AzureMachineLearningWebServiceOutputColumn describes an output column for the Azure Machine Learning web +// service endpoint. +type AzureMachineLearningWebServiceOutputColumn struct { + // Name - The name of the output column. + Name *string `json:"name,omitempty"` + // DataType - The (Azure Machine Learning supported) data type of the output column. A list of valid Azure Machine Learning data types are described at https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx . + DataType *string `json:"dataType,omitempty"` +} + +// AzureSQLDatabaseDataSourceProperties the properties that are associated with an Azure SQL database data +// source. +type AzureSQLDatabaseDataSourceProperties struct { + // Server - The name of the SQL server containing the Azure SQL database. Required on PUT (CreateOrReplace) requests. + Server *string `json:"server,omitempty"` + // Database - The name of the Azure SQL database. Required on PUT (CreateOrReplace) requests. 
+ Database *string `json:"database,omitempty"` + // User - The user name that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. + User *string `json:"user,omitempty"` + // Password - The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. + Password *string `json:"password,omitempty"` + // Table - The name of the table in the Azure SQL database. Required on PUT (CreateOrReplace) requests. + Table *string `json:"table,omitempty"` +} + +// AzureSQLDatabaseOutputDataSource describes an Azure SQL database output data source. +type AzureSQLDatabaseOutputDataSource struct { + // AzureSQLDatabaseOutputDataSourceProperties - The properties that are associated with an Azure SQL database output. Required on PUT (CreateOrReplace) requests. + *AzureSQLDatabaseOutputDataSourceProperties `json:"properties,omitempty"` + // Type - Possible values include: 'TypeOutputDataSource', 'TypeMicrosoftDataLakeAccounts', 'TypePowerBI', 'TypeMicrosoftServiceBusTopic', 'TypeMicrosoftServiceBusQueue', 'TypeMicrosoftStorageDocumentDB', 'TypeMicrosoftSQLServerDatabase', 'TypeMicrosoftServiceBusEventHub', 'TypeMicrosoftStorageTable', 'TypeMicrosoftStorageBlob' + Type TypeBasicOutputDataSource `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for AzureSQLDatabaseOutputDataSource. +func (asdods AzureSQLDatabaseOutputDataSource) MarshalJSON() ([]byte, error) { + asdods.Type = TypeMicrosoftSQLServerDatabase + objectMap := make(map[string]interface{}) + if asdods.AzureSQLDatabaseOutputDataSourceProperties != nil { + objectMap["properties"] = asdods.AzureSQLDatabaseOutputDataSourceProperties + } + if asdods.Type != "" { + objectMap["type"] = asdods.Type + } + return json.Marshal(objectMap) +} + +// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. 
+func (asdods AzureSQLDatabaseOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { + return nil, false +} + +// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. +func (asdods AzureSQLDatabaseOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. +func (asdods AzureSQLDatabaseOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. +func (asdods AzureSQLDatabaseOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { + return nil, false +} + +// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. +func (asdods AzureSQLDatabaseOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { + return nil, false +} + +// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. +func (asdods AzureSQLDatabaseOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { + return &asdods, true +} + +// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. +func (asdods AzureSQLDatabaseOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { + return nil, false +} + +// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. 
+func (asdods AzureSQLDatabaseOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { + return nil, false +} + +// AsBlobOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. +func (asdods AzureSQLDatabaseOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { + return nil, false +} + +// AsOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. +func (asdods AzureSQLDatabaseOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { + return nil, false +} + +// AsBasicOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. +func (asdods AzureSQLDatabaseOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { + return &asdods, true +} + +// UnmarshalJSON is the custom unmarshaler for AzureSQLDatabaseOutputDataSource struct. +func (asdods *AzureSQLDatabaseOutputDataSource) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var azureSQLDatabaseOutputDataSourceProperties AzureSQLDatabaseOutputDataSourceProperties + err = json.Unmarshal(*v, &azureSQLDatabaseOutputDataSourceProperties) + if err != nil { + return err + } + asdods.AzureSQLDatabaseOutputDataSourceProperties = &azureSQLDatabaseOutputDataSourceProperties + } + case "type": + if v != nil { + var typeVar TypeBasicOutputDataSource + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + asdods.Type = typeVar + } + } + } + + return nil +} + +// AzureSQLDatabaseOutputDataSourceProperties the properties that are associated with an Azure SQL database +// output. +type AzureSQLDatabaseOutputDataSourceProperties struct { + // Server - The name of the SQL server containing the Azure SQL database. 
Required on PUT (CreateOrReplace) requests. + Server *string `json:"server,omitempty"` + // Database - The name of the Azure SQL database. Required on PUT (CreateOrReplace) requests. + Database *string `json:"database,omitempty"` + // User - The user name that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. + User *string `json:"user,omitempty"` + // Password - The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. + Password *string `json:"password,omitempty"` + // Table - The name of the table in the Azure SQL database. Required on PUT (CreateOrReplace) requests. + Table *string `json:"table,omitempty"` +} + +// AzureTableOutputDataSource describes an Azure Table output data source. +type AzureTableOutputDataSource struct { + // AzureTableOutputDataSourceProperties - The properties that are associated with an Azure Table output. Required on PUT (CreateOrReplace) requests. + *AzureTableOutputDataSourceProperties `json:"properties,omitempty"` + // Type - Possible values include: 'TypeOutputDataSource', 'TypeMicrosoftDataLakeAccounts', 'TypePowerBI', 'TypeMicrosoftServiceBusTopic', 'TypeMicrosoftServiceBusQueue', 'TypeMicrosoftStorageDocumentDB', 'TypeMicrosoftSQLServerDatabase', 'TypeMicrosoftServiceBusEventHub', 'TypeMicrosoftStorageTable', 'TypeMicrosoftStorageBlob' + Type TypeBasicOutputDataSource `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for AzureTableOutputDataSource. 
+func (atods AzureTableOutputDataSource) MarshalJSON() ([]byte, error) { + atods.Type = TypeMicrosoftStorageTable + objectMap := make(map[string]interface{}) + if atods.AzureTableOutputDataSourceProperties != nil { + objectMap["properties"] = atods.AzureTableOutputDataSourceProperties + } + if atods.Type != "" { + objectMap["type"] = atods.Type + } + return json.Marshal(objectMap) +} + +// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. +func (atods AzureTableOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { + return nil, false +} + +// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. +func (atods AzureTableOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. +func (atods AzureTableOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. +func (atods AzureTableOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { + return nil, false +} + +// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. +func (atods AzureTableOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { + return nil, false +} + +// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. 
+func (atods AzureTableOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { + return nil, false +} + +// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. +func (atods AzureTableOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { + return nil, false +} + +// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. +func (atods AzureTableOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { + return &atods, true +} + +// AsBlobOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. +func (atods AzureTableOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { + return nil, false +} + +// AsOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. +func (atods AzureTableOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { + return nil, false +} + +// AsBasicOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. +func (atods AzureTableOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { + return &atods, true +} + +// UnmarshalJSON is the custom unmarshaler for AzureTableOutputDataSource struct. 
+func (atods *AzureTableOutputDataSource) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var azureTableOutputDataSourceProperties AzureTableOutputDataSourceProperties + err = json.Unmarshal(*v, &azureTableOutputDataSourceProperties) + if err != nil { + return err + } + atods.AzureTableOutputDataSourceProperties = &azureTableOutputDataSourceProperties + } + case "type": + if v != nil { + var typeVar TypeBasicOutputDataSource + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + atods.Type = typeVar + } + } + } + + return nil +} + +// AzureTableOutputDataSourceProperties the properties that are associated with an Azure Table output. +type AzureTableOutputDataSourceProperties struct { + // AccountName - The name of the Azure Storage account. Required on PUT (CreateOrReplace) requests. + AccountName *string `json:"accountName,omitempty"` + // AccountKey - The account key for the Azure Storage account. Required on PUT (CreateOrReplace) requests. + AccountKey *string `json:"accountKey,omitempty"` + // Table - The name of the Azure Table. Required on PUT (CreateOrReplace) requests. + Table *string `json:"table,omitempty"` + // PartitionKey - This element indicates the name of a column from the SELECT statement in the query that will be used as the partition key for the Azure Table. Required on PUT (CreateOrReplace) requests. + PartitionKey *string `json:"partitionKey,omitempty"` + // RowKey - This element indicates the name of a column from the SELECT statement in the query that will be used as the row key for the Azure Table. Required on PUT (CreateOrReplace) requests. + RowKey *string `json:"rowKey,omitempty"` + // ColumnsToRemove - If specified, each item in the array is the name of a column to remove (if present) from output event entities. 
+ ColumnsToRemove *[]string `json:"columnsToRemove,omitempty"` + // BatchSize - The number of rows to write to the Azure Table at a time. + BatchSize *int32 `json:"batchSize,omitempty"` +} + +// BlobDataSourceProperties the properties that are associated with a blob data source. +type BlobDataSourceProperties struct { + // StorageAccounts - A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests. + StorageAccounts *[]StorageAccount `json:"storageAccounts,omitempty"` + // Container - The name of a container within the associated Storage account. This container contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) requests. + Container *string `json:"container,omitempty"` + // PathPattern - The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job. See https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and example. + PathPattern *string `json:"pathPattern,omitempty"` + // DateFormat - The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. + DateFormat *string `json:"dateFormat,omitempty"` + // TimeFormat - The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead. + TimeFormat *string `json:"timeFormat,omitempty"` +} + +// BlobOutputDataSource describes a blob output data source. +type BlobOutputDataSource struct { + // BlobOutputDataSourceProperties - The properties that are associated with a blob output. Required on PUT (CreateOrReplace) requests. 
+ *BlobOutputDataSourceProperties `json:"properties,omitempty"` + // Type - Possible values include: 'TypeOutputDataSource', 'TypeMicrosoftDataLakeAccounts', 'TypePowerBI', 'TypeMicrosoftServiceBusTopic', 'TypeMicrosoftServiceBusQueue', 'TypeMicrosoftStorageDocumentDB', 'TypeMicrosoftSQLServerDatabase', 'TypeMicrosoftServiceBusEventHub', 'TypeMicrosoftStorageTable', 'TypeMicrosoftStorageBlob' + Type TypeBasicOutputDataSource `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for BlobOutputDataSource. +func (bods BlobOutputDataSource) MarshalJSON() ([]byte, error) { + bods.Type = TypeMicrosoftStorageBlob + objectMap := make(map[string]interface{}) + if bods.BlobOutputDataSourceProperties != nil { + objectMap["properties"] = bods.BlobOutputDataSourceProperties + } + if bods.Type != "" { + objectMap["type"] = bods.Type + } + return json.Marshal(objectMap) +} + +// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource. +func (bods BlobOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { + return nil, false +} + +// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource. +func (bods BlobOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource. +func (bods BlobOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource. +func (bods BlobOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { + return nil, false +} + +// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource. 
+func (bods BlobOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { + return nil, false +} + +// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource. +func (bods BlobOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { + return nil, false +} + +// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource. +func (bods BlobOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { + return nil, false +} + +// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource. +func (bods BlobOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { + return nil, false +} + +// AsBlobOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource. +func (bods BlobOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { + return &bods, true +} + +// AsOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource. +func (bods BlobOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { + return nil, false +} + +// AsBasicOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource. +func (bods BlobOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { + return &bods, true +} + +// UnmarshalJSON is the custom unmarshaler for BlobOutputDataSource struct. 
+func (bods *BlobOutputDataSource) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var blobOutputDataSourceProperties BlobOutputDataSourceProperties + err = json.Unmarshal(*v, &blobOutputDataSourceProperties) + if err != nil { + return err + } + bods.BlobOutputDataSourceProperties = &blobOutputDataSourceProperties + } + case "type": + if v != nil { + var typeVar TypeBasicOutputDataSource + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + bods.Type = typeVar + } + } + } + + return nil +} + +// BlobOutputDataSourceProperties the properties that are associated with a blob output. +type BlobOutputDataSourceProperties struct { + // StorageAccounts - A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests. + StorageAccounts *[]StorageAccount `json:"storageAccounts,omitempty"` + // Container - The name of a container within the associated Storage account. This container contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) requests. + Container *string `json:"container,omitempty"` + // PathPattern - The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job. See https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and example. + PathPattern *string `json:"pathPattern,omitempty"` + // DateFormat - The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. + DateFormat *string `json:"dateFormat,omitempty"` + // TimeFormat - The time format. 
Wherever {time} appears in pathPattern, the value of this property is used as the time format instead. + TimeFormat *string `json:"timeFormat,omitempty"` +} + +// BlobReferenceInputDataSource describes a blob input data source that contains reference data. +type BlobReferenceInputDataSource struct { + // BlobReferenceInputDataSourceProperties - The properties that are associated with a blob input containing reference data. Required on PUT (CreateOrReplace) requests. + *BlobReferenceInputDataSourceProperties `json:"properties,omitempty"` + // Type - Possible values include: 'TypeBasicReferenceInputDataSourceTypeReferenceInputDataSource', 'TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob' + Type TypeBasicReferenceInputDataSource `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for BlobReferenceInputDataSource. +func (brids BlobReferenceInputDataSource) MarshalJSON() ([]byte, error) { + brids.Type = TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob + objectMap := make(map[string]interface{}) + if brids.BlobReferenceInputDataSourceProperties != nil { + objectMap["properties"] = brids.BlobReferenceInputDataSourceProperties + } + if brids.Type != "" { + objectMap["type"] = brids.Type + } + return json.Marshal(objectMap) +} + +// AsBlobReferenceInputDataSource is the BasicReferenceInputDataSource implementation for BlobReferenceInputDataSource. +func (brids BlobReferenceInputDataSource) AsBlobReferenceInputDataSource() (*BlobReferenceInputDataSource, bool) { + return &brids, true +} + +// AsReferenceInputDataSource is the BasicReferenceInputDataSource implementation for BlobReferenceInputDataSource. +func (brids BlobReferenceInputDataSource) AsReferenceInputDataSource() (*ReferenceInputDataSource, bool) { + return nil, false +} + +// AsBasicReferenceInputDataSource is the BasicReferenceInputDataSource implementation for BlobReferenceInputDataSource. 
+func (brids BlobReferenceInputDataSource) AsBasicReferenceInputDataSource() (BasicReferenceInputDataSource, bool) { + return &brids, true +} + +// UnmarshalJSON is the custom unmarshaler for BlobReferenceInputDataSource struct. +func (brids *BlobReferenceInputDataSource) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var blobReferenceInputDataSourceProperties BlobReferenceInputDataSourceProperties + err = json.Unmarshal(*v, &blobReferenceInputDataSourceProperties) + if err != nil { + return err + } + brids.BlobReferenceInputDataSourceProperties = &blobReferenceInputDataSourceProperties + } + case "type": + if v != nil { + var typeVar TypeBasicReferenceInputDataSource + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + brids.Type = typeVar + } + } + } + + return nil +} + +// BlobReferenceInputDataSourceProperties the properties that are associated with a blob input containing +// reference data. +type BlobReferenceInputDataSourceProperties struct { + // StorageAccounts - A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests. + StorageAccounts *[]StorageAccount `json:"storageAccounts,omitempty"` + // Container - The name of a container within the associated Storage account. This container contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) requests. + Container *string `json:"container,omitempty"` + // PathPattern - The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job. 
See https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and example. + PathPattern *string `json:"pathPattern,omitempty"` + // DateFormat - The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. + DateFormat *string `json:"dateFormat,omitempty"` + // TimeFormat - The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead. + TimeFormat *string `json:"timeFormat,omitempty"` +} + +// BlobStreamInputDataSource describes a blob input data source that contains stream data. +type BlobStreamInputDataSource struct { + // BlobStreamInputDataSourceProperties - The properties that are associated with a blob input containing stream data. Required on PUT (CreateOrReplace) requests. + *BlobStreamInputDataSourceProperties `json:"properties,omitempty"` + // Type - Possible values include: 'TypeBasicStreamInputDataSourceTypeStreamInputDataSource', 'TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs', 'TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob' + Type TypeBasicStreamInputDataSource `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for BlobStreamInputDataSource. +func (bsids BlobStreamInputDataSource) MarshalJSON() ([]byte, error) { + bsids.Type = TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob + objectMap := make(map[string]interface{}) + if bsids.BlobStreamInputDataSourceProperties != nil { + objectMap["properties"] = bsids.BlobStreamInputDataSourceProperties + } + if bsids.Type != "" { + objectMap["type"] = bsids.Type + } + return json.Marshal(objectMap) +} + +// AsIoTHubStreamInputDataSource is the BasicStreamInputDataSource implementation for BlobStreamInputDataSource. 
+func (bsids BlobStreamInputDataSource) AsIoTHubStreamInputDataSource() (*IoTHubStreamInputDataSource, bool) { + return nil, false +} + +// AsEventHubStreamInputDataSource is the BasicStreamInputDataSource implementation for BlobStreamInputDataSource. +func (bsids BlobStreamInputDataSource) AsEventHubStreamInputDataSource() (*EventHubStreamInputDataSource, bool) { + return nil, false +} + +// AsBlobStreamInputDataSource is the BasicStreamInputDataSource implementation for BlobStreamInputDataSource. +func (bsids BlobStreamInputDataSource) AsBlobStreamInputDataSource() (*BlobStreamInputDataSource, bool) { + return &bsids, true +} + +// AsStreamInputDataSource is the BasicStreamInputDataSource implementation for BlobStreamInputDataSource. +func (bsids BlobStreamInputDataSource) AsStreamInputDataSource() (*StreamInputDataSource, bool) { + return nil, false +} + +// AsBasicStreamInputDataSource is the BasicStreamInputDataSource implementation for BlobStreamInputDataSource. +func (bsids BlobStreamInputDataSource) AsBasicStreamInputDataSource() (BasicStreamInputDataSource, bool) { + return &bsids, true +} + +// UnmarshalJSON is the custom unmarshaler for BlobStreamInputDataSource struct. 
+func (bsids *BlobStreamInputDataSource) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var blobStreamInputDataSourceProperties BlobStreamInputDataSourceProperties + err = json.Unmarshal(*v, &blobStreamInputDataSourceProperties) + if err != nil { + return err + } + bsids.BlobStreamInputDataSourceProperties = &blobStreamInputDataSourceProperties + } + case "type": + if v != nil { + var typeVar TypeBasicStreamInputDataSource + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + bsids.Type = typeVar + } + } + } + + return nil +} + +// BlobStreamInputDataSourceProperties the properties that are associated with a blob input containing +// stream data. +type BlobStreamInputDataSourceProperties struct { + // SourcePartitionCount - The partition count of the blob input data source. Range 1 - 256. + SourcePartitionCount *int32 `json:"sourcePartitionCount,omitempty"` + // StorageAccounts - A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests. + StorageAccounts *[]StorageAccount `json:"storageAccounts,omitempty"` + // Container - The name of a container within the associated Storage account. This container contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) requests. + Container *string `json:"container,omitempty"` + // PathPattern - The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job. See https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and example. 
+ PathPattern *string `json:"pathPattern,omitempty"` + // DateFormat - The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. + DateFormat *string `json:"dateFormat,omitempty"` + // TimeFormat - The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead. + TimeFormat *string `json:"timeFormat,omitempty"` +} + +// CsvSerialization describes how data from an input is serialized or how data is serialized when written +// to an output in CSV format. +type CsvSerialization struct { + // CsvSerializationProperties - The properties that are associated with the CSV serialization type. Required on PUT (CreateOrReplace) requests. + *CsvSerializationProperties `json:"properties,omitempty"` + // Type - Possible values include: 'TypeSerialization', 'TypeAvro', 'TypeJSON', 'TypeCsv' + Type Type `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for CsvSerialization. +func (cs CsvSerialization) MarshalJSON() ([]byte, error) { + cs.Type = TypeCsv + objectMap := make(map[string]interface{}) + if cs.CsvSerializationProperties != nil { + objectMap["properties"] = cs.CsvSerializationProperties + } + if cs.Type != "" { + objectMap["type"] = cs.Type + } + return json.Marshal(objectMap) +} + +// AsAvroSerialization is the BasicSerialization implementation for CsvSerialization. +func (cs CsvSerialization) AsAvroSerialization() (*AvroSerialization, bool) { + return nil, false +} + +// AsJSONSerialization is the BasicSerialization implementation for CsvSerialization. +func (cs CsvSerialization) AsJSONSerialization() (*JSONSerialization, bool) { + return nil, false +} + +// AsCsvSerialization is the BasicSerialization implementation for CsvSerialization. +func (cs CsvSerialization) AsCsvSerialization() (*CsvSerialization, bool) { + return &cs, true +} + +// AsSerialization is the BasicSerialization implementation for CsvSerialization. 
+func (cs CsvSerialization) AsSerialization() (*Serialization, bool) { + return nil, false +} + +// AsBasicSerialization is the BasicSerialization implementation for CsvSerialization. +func (cs CsvSerialization) AsBasicSerialization() (BasicSerialization, bool) { + return &cs, true +} + +// UnmarshalJSON is the custom unmarshaler for CsvSerialization struct. +func (cs *CsvSerialization) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var csvSerializationProperties CsvSerializationProperties + err = json.Unmarshal(*v, &csvSerializationProperties) + if err != nil { + return err + } + cs.CsvSerializationProperties = &csvSerializationProperties + } + case "type": + if v != nil { + var typeVar Type + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + cs.Type = typeVar + } + } + } + + return nil +} + +// CsvSerializationProperties the properties that are associated with the CSV serialization type. +type CsvSerializationProperties struct { + // FieldDelimiter - Specifies the delimiter that will be used to separate comma-separated value (CSV) records. See https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a list of supported values. Required on PUT (CreateOrReplace) requests. + FieldDelimiter *string `json:"fieldDelimiter,omitempty"` + // Encoding - Specifies the encoding of the incoming data in the case of input and the encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. Possible values include: 'UTF8' + Encoding Encoding `json:"encoding,omitempty"` +} + +// DiagnosticCondition condition applicable to the resource, or to the job overall, that warrant customer +// attention. 
+type DiagnosticCondition struct { + // Since - The UTC timestamp of when the condition started. Customers should be able to find a corresponding event in the ops log around this time. + Since *string `json:"since,omitempty"` + // Code - The opaque diagnostic code. + Code *string `json:"code,omitempty"` + // Message - The human-readable message describing the condition in detail. Localized in the Accept-Language of the client request. + Message *string `json:"message,omitempty"` +} + +// Diagnostics describes conditions applicable to the Input, Output, or the job overall, that warrant +// customer attention. +type Diagnostics struct { + // Conditions - A collection of zero or more conditions applicable to the resource, or to the job overall, that warrant customer attention. + Conditions *[]DiagnosticCondition `json:"conditions,omitempty"` +} + +// DocumentDbOutputDataSource describes a DocumentDB output data source. +type DocumentDbOutputDataSource struct { + // DocumentDbOutputDataSourceProperties - The properties that are associated with a DocumentDB output. Required on PUT (CreateOrReplace) requests. + *DocumentDbOutputDataSourceProperties `json:"properties,omitempty"` + // Type - Possible values include: 'TypeOutputDataSource', 'TypeMicrosoftDataLakeAccounts', 'TypePowerBI', 'TypeMicrosoftServiceBusTopic', 'TypeMicrosoftServiceBusQueue', 'TypeMicrosoftStorageDocumentDB', 'TypeMicrosoftSQLServerDatabase', 'TypeMicrosoftServiceBusEventHub', 'TypeMicrosoftStorageTable', 'TypeMicrosoftStorageBlob' + Type TypeBasicOutputDataSource `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for DocumentDbOutputDataSource. 
+func (ddods DocumentDbOutputDataSource) MarshalJSON() ([]byte, error) { + ddods.Type = TypeMicrosoftStorageDocumentDB + objectMap := make(map[string]interface{}) + if ddods.DocumentDbOutputDataSourceProperties != nil { + objectMap["properties"] = ddods.DocumentDbOutputDataSourceProperties + } + if ddods.Type != "" { + objectMap["type"] = ddods.Type + } + return json.Marshal(objectMap) +} + +// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. +func (ddods DocumentDbOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { + return nil, false +} + +// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. +func (ddods DocumentDbOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. +func (ddods DocumentDbOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. +func (ddods DocumentDbOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { + return nil, false +} + +// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. +func (ddods DocumentDbOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { + return &ddods, true +} + +// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. 
+func (ddods DocumentDbOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { + return nil, false +} + +// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. +func (ddods DocumentDbOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { + return nil, false +} + +// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. +func (ddods DocumentDbOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { + return nil, false +} + +// AsBlobOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. +func (ddods DocumentDbOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { + return nil, false +} + +// AsOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. +func (ddods DocumentDbOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { + return nil, false +} + +// AsBasicOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. +func (ddods DocumentDbOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { + return &ddods, true +} + +// UnmarshalJSON is the custom unmarshaler for DocumentDbOutputDataSource struct. 
+func (ddods *DocumentDbOutputDataSource) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var documentDbOutputDataSourceProperties DocumentDbOutputDataSourceProperties + err = json.Unmarshal(*v, &documentDbOutputDataSourceProperties) + if err != nil { + return err + } + ddods.DocumentDbOutputDataSourceProperties = &documentDbOutputDataSourceProperties + } + case "type": + if v != nil { + var typeVar TypeBasicOutputDataSource + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + ddods.Type = typeVar + } + } + } + + return nil +} + +// DocumentDbOutputDataSourceProperties the properties that are associated with a DocumentDB output. +type DocumentDbOutputDataSourceProperties struct { + // AccountID - The DocumentDB account name or ID. Required on PUT (CreateOrReplace) requests. + AccountID *string `json:"accountId,omitempty"` + // AccountKey - The account key for the DocumentDB account. Required on PUT (CreateOrReplace) requests. + AccountKey *string `json:"accountKey,omitempty"` + // Database - The name of the DocumentDB database. Required on PUT (CreateOrReplace) requests. + Database *string `json:"database,omitempty"` + // CollectionNamePattern - The collection name pattern for the collections to be used. The collection name format can be constructed using the optional {partition} token, where partitions start from 0. See the DocumentDB section of https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for more information. Required on PUT (CreateOrReplace) requests. + CollectionNamePattern *string `json:"collectionNamePattern,omitempty"` + // PartitionKey - The name of the field in output events used to specify the key for partitioning output across collections. 
If 'collectionNamePattern' contains the {partition} token, this property is required to be specified. + PartitionKey *string `json:"partitionKey,omitempty"` + // DocumentID - The name of the field in output events used to specify the primary key which insert or update operations are based on. + DocumentID *string `json:"documentId,omitempty"` +} + +// ErrorResponse describes the error that occurred. +type ErrorResponse struct { + // Code - Error code associated with the error that occurred. + Code *string `json:"code,omitempty"` + // Message - Describes the error in detail. + Message *string `json:"message,omitempty"` +} + +// EventHubDataSourceProperties the common properties that are associated with Event Hub data sources. +type EventHubDataSourceProperties struct { + // EventHubName - The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + EventHubName *string `json:"eventHubName,omitempty"` + // ServiceBusNamespace - The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"` + // SharedAccessPolicyName - The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` + // SharedAccessPolicyKey - The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. + SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` +} + +// EventHubOutputDataSource describes an Event Hub output data source. +type EventHubOutputDataSource struct { + // EventHubOutputDataSourceProperties - The properties that are associated with an Event Hub output. Required on PUT (CreateOrReplace) requests. 
+ *EventHubOutputDataSourceProperties `json:"properties,omitempty"` + // Type - Possible values include: 'TypeOutputDataSource', 'TypeMicrosoftDataLakeAccounts', 'TypePowerBI', 'TypeMicrosoftServiceBusTopic', 'TypeMicrosoftServiceBusQueue', 'TypeMicrosoftStorageDocumentDB', 'TypeMicrosoftSQLServerDatabase', 'TypeMicrosoftServiceBusEventHub', 'TypeMicrosoftStorageTable', 'TypeMicrosoftStorageBlob' + Type TypeBasicOutputDataSource `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) MarshalJSON() ([]byte, error) { + ehods.Type = TypeMicrosoftServiceBusEventHub + objectMap := make(map[string]interface{}) + if ehods.EventHubOutputDataSourceProperties != nil { + objectMap["properties"] = ehods.EventHubOutputDataSourceProperties + } + if ehods.Type != "" { + objectMap["type"] = ehods.Type + } + return json.Marshal(objectMap) +} + +// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { + return nil, false +} + +// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. 
+func (ehods EventHubOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { + return nil, false +} + +// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { + return nil, false +} + +// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { + return nil, false +} + +// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { + return &ehods, true +} + +// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { + return nil, false +} + +// AsBlobOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { + return nil, false +} + +// AsOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { + return nil, false +} + +// AsBasicOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { + return &ehods, true +} + +// UnmarshalJSON is the custom unmarshaler for EventHubOutputDataSource struct. 
+func (ehods *EventHubOutputDataSource) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var eventHubOutputDataSourceProperties EventHubOutputDataSourceProperties + err = json.Unmarshal(*v, &eventHubOutputDataSourceProperties) + if err != nil { + return err + } + ehods.EventHubOutputDataSourceProperties = &eventHubOutputDataSourceProperties + } + case "type": + if v != nil { + var typeVar TypeBasicOutputDataSource + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + ehods.Type = typeVar + } + } + } + + return nil +} + +// EventHubOutputDataSourceProperties the properties that are associated with an Event Hub output. +type EventHubOutputDataSourceProperties struct { + // PartitionKey - The key/column that is used to determine to which partition to send event data. + PartitionKey *string `json:"partitionKey,omitempty"` + // EventHubName - The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + EventHubName *string `json:"eventHubName,omitempty"` + // ServiceBusNamespace - The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"` + // SharedAccessPolicyName - The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` + // SharedAccessPolicyKey - The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. + SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` +} + +// EventHubStreamInputDataSource describes an Event Hub input data source that contains stream data. 
+type EventHubStreamInputDataSource struct { + // EventHubStreamInputDataSourceProperties - The properties that are associated with an Event Hub input containing stream data. Required on PUT (CreateOrReplace) requests. + *EventHubStreamInputDataSourceProperties `json:"properties,omitempty"` + // Type - Possible values include: 'TypeBasicStreamInputDataSourceTypeStreamInputDataSource', 'TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs', 'TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob' + Type TypeBasicStreamInputDataSource `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for EventHubStreamInputDataSource. +func (ehsids EventHubStreamInputDataSource) MarshalJSON() ([]byte, error) { + ehsids.Type = TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub + objectMap := make(map[string]interface{}) + if ehsids.EventHubStreamInputDataSourceProperties != nil { + objectMap["properties"] = ehsids.EventHubStreamInputDataSourceProperties + } + if ehsids.Type != "" { + objectMap["type"] = ehsids.Type + } + return json.Marshal(objectMap) +} + +// AsIoTHubStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubStreamInputDataSource. +func (ehsids EventHubStreamInputDataSource) AsIoTHubStreamInputDataSource() (*IoTHubStreamInputDataSource, bool) { + return nil, false +} + +// AsEventHubStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubStreamInputDataSource. +func (ehsids EventHubStreamInputDataSource) AsEventHubStreamInputDataSource() (*EventHubStreamInputDataSource, bool) { + return &ehsids, true +} + +// AsBlobStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubStreamInputDataSource. 
+func (ehsids EventHubStreamInputDataSource) AsBlobStreamInputDataSource() (*BlobStreamInputDataSource, bool) { + return nil, false +} + +// AsStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubStreamInputDataSource. +func (ehsids EventHubStreamInputDataSource) AsStreamInputDataSource() (*StreamInputDataSource, bool) { + return nil, false +} + +// AsBasicStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubStreamInputDataSource. +func (ehsids EventHubStreamInputDataSource) AsBasicStreamInputDataSource() (BasicStreamInputDataSource, bool) { + return &ehsids, true +} + +// UnmarshalJSON is the custom unmarshaler for EventHubStreamInputDataSource struct. +func (ehsids *EventHubStreamInputDataSource) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var eventHubStreamInputDataSourceProperties EventHubStreamInputDataSourceProperties + err = json.Unmarshal(*v, &eventHubStreamInputDataSourceProperties) + if err != nil { + return err + } + ehsids.EventHubStreamInputDataSourceProperties = &eventHubStreamInputDataSourceProperties + } + case "type": + if v != nil { + var typeVar TypeBasicStreamInputDataSource + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + ehsids.Type = typeVar + } + } + } + + return nil +} + +// EventHubStreamInputDataSourceProperties the properties that are associated with a Event Hub input +// containing stream data. +type EventHubStreamInputDataSourceProperties struct { + // ConsumerGroupName - The name of an Event Hub Consumer Group that should be used to read events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows each of those inputs to receive the same events from the Event Hub. If not specified, the input uses the Event Hub’s default consumer group. 
+ ConsumerGroupName *string `json:"consumerGroupName,omitempty"` + // EventHubName - The name of the Event Hub. Required on PUT (CreateOrReplace) requests. + EventHubName *string `json:"eventHubName,omitempty"` + // ServiceBusNamespace - The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"` + // SharedAccessPolicyName - The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` + // SharedAccessPolicyKey - The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. + SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` +} + +// Function a function object, containing all information associated with the named function. All functions +// are contained under a streaming job. +type Function struct { + autorest.Response `json:"-"` + // Properties - The properties that are associated with a function. + Properties BasicFunctionProperties `json:"properties,omitempty"` + // ID - Resource Id + ID *string `json:"id,omitempty"` + // Name - Resource name + Name *string `json:"name,omitempty"` + // Type - Resource type + Type *string `json:"type,omitempty"` +} + +// UnmarshalJSON is the custom unmarshaler for Function struct. 
+func (f *Function) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + properties, err := unmarshalBasicFunctionProperties(*v) + if err != nil { + return err + } + f.Properties = properties + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + f.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + f.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + f.Type = &typeVar + } + } + } + + return nil +} + +// BasicFunctionBinding the physical binding of the function. For example, in the Azure Machine Learning web service’s +// case, this describes the endpoint. +type BasicFunctionBinding interface { + AsJavaScriptFunctionBinding() (*JavaScriptFunctionBinding, bool) + AsAzureMachineLearningWebServiceFunctionBinding() (*AzureMachineLearningWebServiceFunctionBinding, bool) + AsFunctionBinding() (*FunctionBinding, bool) +} + +// FunctionBinding the physical binding of the function. For example, in the Azure Machine Learning web +// service’s case, this describes the endpoint. 
+type FunctionBinding struct { + // Type - Possible values include: 'TypeFunctionBinding', 'TypeMicrosoftStreamAnalyticsJavascriptUdf', 'TypeMicrosoftMachineLearningWebService' + Type TypeBasicFunctionBinding `json:"type,omitempty"` +} + +func unmarshalBasicFunctionBinding(body []byte) (BasicFunctionBinding, error) { + var m map[string]interface{} + err := json.Unmarshal(body, &m) + if err != nil { + return nil, err + } + + switch m["type"] { + case string(TypeMicrosoftStreamAnalyticsJavascriptUdf): + var jsfb JavaScriptFunctionBinding + err := json.Unmarshal(body, &jsfb) + return jsfb, err + case string(TypeMicrosoftMachineLearningWebService): + var amlwsfb AzureMachineLearningWebServiceFunctionBinding + err := json.Unmarshal(body, &amlwsfb) + return amlwsfb, err + default: + var fb FunctionBinding + err := json.Unmarshal(body, &fb) + return fb, err + } +} +func unmarshalBasicFunctionBindingArray(body []byte) ([]BasicFunctionBinding, error) { + var rawMessages []*json.RawMessage + err := json.Unmarshal(body, &rawMessages) + if err != nil { + return nil, err + } + + fbArray := make([]BasicFunctionBinding, len(rawMessages)) + + for index, rawMessage := range rawMessages { + fb, err := unmarshalBasicFunctionBinding(*rawMessage) + if err != nil { + return nil, err + } + fbArray[index] = fb + } + return fbArray, nil +} + +// MarshalJSON is the custom marshaler for FunctionBinding. +func (fb FunctionBinding) MarshalJSON() ([]byte, error) { + fb.Type = TypeFunctionBinding + objectMap := make(map[string]interface{}) + if fb.Type != "" { + objectMap["type"] = fb.Type + } + return json.Marshal(objectMap) +} + +// AsJavaScriptFunctionBinding is the BasicFunctionBinding implementation for FunctionBinding. +func (fb FunctionBinding) AsJavaScriptFunctionBinding() (*JavaScriptFunctionBinding, bool) { + return nil, false +} + +// AsAzureMachineLearningWebServiceFunctionBinding is the BasicFunctionBinding implementation for FunctionBinding. 
+func (fb FunctionBinding) AsAzureMachineLearningWebServiceFunctionBinding() (*AzureMachineLearningWebServiceFunctionBinding, bool) { + return nil, false +} + +// AsFunctionBinding is the BasicFunctionBinding implementation for FunctionBinding. +func (fb FunctionBinding) AsFunctionBinding() (*FunctionBinding, bool) { + return &fb, true +} + +// AsBasicFunctionBinding is the BasicFunctionBinding implementation for FunctionBinding. +func (fb FunctionBinding) AsBasicFunctionBinding() (BasicFunctionBinding, bool) { + return &fb, true +} + +// FunctionInput describes one input parameter of a function. +type FunctionInput struct { + // DataType - The (Azure Stream Analytics supported) data type of the function input parameter. A list of valid Azure Stream Analytics data types are described at https://msdn.microsoft.com/en-us/library/azure/dn835065.aspx + DataType *string `json:"dataType,omitempty"` + // IsConfigurationParameter - A flag indicating if the parameter is a configuration parameter. True if this input parameter is expected to be a constant. Default is false. + IsConfigurationParameter *bool `json:"isConfigurationParameter,omitempty"` +} + +// FunctionListResult object containing a list of functions under a streaming job. +type FunctionListResult struct { + autorest.Response `json:"-"` + // Value - A list of functions under a streaming job. Populated by a 'List' operation. + Value *[]Function `json:"value,omitempty"` + // NextLink - The link (url) to the next page of results. + NextLink *string `json:"nextLink,omitempty"` +} + +// FunctionListResultIterator provides access to a complete listing of Function values. +type FunctionListResultIterator struct { + i int + page FunctionListResultPage +} + +// NextWithContext advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. 
+func (iter *FunctionListResultIterator) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/FunctionListResultIterator.NextWithContext") + defer func() { + sc := -1 + if iter.Response().Response.Response != nil { + sc = iter.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err = iter.page.NextWithContext(ctx) + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (iter *FunctionListResultIterator) Next() error { + return iter.NextWithContext(context.Background()) +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter FunctionListResultIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter FunctionListResultIterator) Response() FunctionListResult { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter FunctionListResultIterator) Value() Function { + if !iter.page.NotDone() { + return Function{} + } + return iter.page.Values()[iter.i] +} + +// Creates a new instance of the FunctionListResultIterator type. +func NewFunctionListResultIterator(page FunctionListResultPage) FunctionListResultIterator { + return FunctionListResultIterator{page: page} +} + +// IsEmpty returns true if the ListResult contains no values. 
+func (flr FunctionListResult) IsEmpty() bool { + return flr.Value == nil || len(*flr.Value) == 0 +} + +// functionListResultPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (flr FunctionListResult) functionListResultPreparer(ctx context.Context) (*http.Request, error) { + if flr.NextLink == nil || len(to.String(flr.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare((&http.Request{}).WithContext(ctx), + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(flr.NextLink))) +} + +// FunctionListResultPage contains a page of Function values. +type FunctionListResultPage struct { + fn func(context.Context, FunctionListResult) (FunctionListResult, error) + flr FunctionListResult +} + +// NextWithContext advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *FunctionListResultPage) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/FunctionListResultPage.NextWithContext") + defer func() { + sc := -1 + if page.Response().Response.Response != nil { + sc = page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + next, err := page.fn(ctx, page.flr) + if err != nil { + return err + } + page.flr = next + return nil +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (page *FunctionListResultPage) Next() error { + return page.NextWithContext(context.Background()) +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page FunctionListResultPage) NotDone() bool { + return !page.flr.IsEmpty() +} + +// Response returns the raw server response from the last page request. 
+func (page FunctionListResultPage) Response() FunctionListResult { + return page.flr +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page FunctionListResultPage) Values() []Function { + if page.flr.IsEmpty() { + return nil + } + return *page.flr.Value +} + +// Creates a new instance of the FunctionListResultPage type. +func NewFunctionListResultPage(getNextPage func(context.Context, FunctionListResult) (FunctionListResult, error)) FunctionListResultPage { + return FunctionListResultPage{fn: getNextPage} +} + +// FunctionOutput describes the output of a function. +type FunctionOutput struct { + // DataType - The (Azure Stream Analytics supported) data type of the function output. A list of valid Azure Stream Analytics data types are described at https://msdn.microsoft.com/en-us/library/azure/dn835065.aspx + DataType *string `json:"dataType,omitempty"` +} + +// BasicFunctionProperties the properties that are associated with a function. +type BasicFunctionProperties interface { + AsScalarFunctionProperties() (*ScalarFunctionProperties, bool) + AsFunctionProperties() (*FunctionProperties, bool) +} + +// FunctionProperties the properties that are associated with a function. +type FunctionProperties struct { + // Etag - The current entity tag for the function. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. 
+ Etag *string `json:"etag,omitempty"` + // Type - Possible values include: 'TypeFunctionProperties', 'TypeScalar' + Type TypeBasicFunctionProperties `json:"type,omitempty"` +} + +func unmarshalBasicFunctionProperties(body []byte) (BasicFunctionProperties, error) { + var m map[string]interface{} + err := json.Unmarshal(body, &m) + if err != nil { + return nil, err + } + + switch m["type"] { + case string(TypeScalar): + var sfp ScalarFunctionProperties + err := json.Unmarshal(body, &sfp) + return sfp, err + default: + var fp FunctionProperties + err := json.Unmarshal(body, &fp) + return fp, err + } +} +func unmarshalBasicFunctionPropertiesArray(body []byte) ([]BasicFunctionProperties, error) { + var rawMessages []*json.RawMessage + err := json.Unmarshal(body, &rawMessages) + if err != nil { + return nil, err + } + + fpArray := make([]BasicFunctionProperties, len(rawMessages)) + + for index, rawMessage := range rawMessages { + fp, err := unmarshalBasicFunctionProperties(*rawMessage) + if err != nil { + return nil, err + } + fpArray[index] = fp + } + return fpArray, nil +} + +// MarshalJSON is the custom marshaler for FunctionProperties. +func (fp FunctionProperties) MarshalJSON() ([]byte, error) { + fp.Type = TypeFunctionProperties + objectMap := make(map[string]interface{}) + if fp.Etag != nil { + objectMap["etag"] = fp.Etag + } + if fp.Type != "" { + objectMap["type"] = fp.Type + } + return json.Marshal(objectMap) +} + +// AsScalarFunctionProperties is the BasicFunctionProperties implementation for FunctionProperties. +func (fp FunctionProperties) AsScalarFunctionProperties() (*ScalarFunctionProperties, bool) { + return nil, false +} + +// AsFunctionProperties is the BasicFunctionProperties implementation for FunctionProperties. +func (fp FunctionProperties) AsFunctionProperties() (*FunctionProperties, bool) { + return &fp, true +} + +// AsBasicFunctionProperties is the BasicFunctionProperties implementation for FunctionProperties. 
+func (fp FunctionProperties) AsBasicFunctionProperties() (BasicFunctionProperties, bool) { + return &fp, true +} + +// BasicFunctionRetrieveDefaultDefinitionParameters parameters used to specify the type of function to retrieve the +// default definition for. +type BasicFunctionRetrieveDefaultDefinitionParameters interface { + AsAzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters() (*AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters, bool) + AsJavaScriptFunctionRetrieveDefaultDefinitionParameters() (*JavaScriptFunctionRetrieveDefaultDefinitionParameters, bool) + AsFunctionRetrieveDefaultDefinitionParameters() (*FunctionRetrieveDefaultDefinitionParameters, bool) +} + +// FunctionRetrieveDefaultDefinitionParameters parameters used to specify the type of function to retrieve the +// default definition for. +type FunctionRetrieveDefaultDefinitionParameters struct { + // BindingType - Possible values include: 'BindingTypeFunctionRetrieveDefaultDefinitionParameters', 'BindingTypeMicrosoftMachineLearningWebService', 'BindingTypeMicrosoftStreamAnalyticsJavascriptUdf' + BindingType BindingType `json:"bindingType,omitempty"` +} + +func unmarshalBasicFunctionRetrieveDefaultDefinitionParameters(body []byte) (BasicFunctionRetrieveDefaultDefinitionParameters, error) { + var m map[string]interface{} + err := json.Unmarshal(body, &m) + if err != nil { + return nil, err + } + + switch m["bindingType"] { + case string(BindingTypeMicrosoftMachineLearningWebService): + var amlwsfrddp AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters + err := json.Unmarshal(body, &amlwsfrddp) + return amlwsfrddp, err + case string(BindingTypeMicrosoftStreamAnalyticsJavascriptUdf): + var jsfrddp JavaScriptFunctionRetrieveDefaultDefinitionParameters + err := json.Unmarshal(body, &jsfrddp) + return jsfrddp, err + default: + var frddp FunctionRetrieveDefaultDefinitionParameters + err := json.Unmarshal(body, &frddp) + return frddp, err + 
} +} +func unmarshalBasicFunctionRetrieveDefaultDefinitionParametersArray(body []byte) ([]BasicFunctionRetrieveDefaultDefinitionParameters, error) { + var rawMessages []*json.RawMessage + err := json.Unmarshal(body, &rawMessages) + if err != nil { + return nil, err + } + + frddpArray := make([]BasicFunctionRetrieveDefaultDefinitionParameters, len(rawMessages)) + + for index, rawMessage := range rawMessages { + frddp, err := unmarshalBasicFunctionRetrieveDefaultDefinitionParameters(*rawMessage) + if err != nil { + return nil, err + } + frddpArray[index] = frddp + } + return frddpArray, nil +} + +// MarshalJSON is the custom marshaler for FunctionRetrieveDefaultDefinitionParameters. +func (frddp FunctionRetrieveDefaultDefinitionParameters) MarshalJSON() ([]byte, error) { + frddp.BindingType = BindingTypeFunctionRetrieveDefaultDefinitionParameters + objectMap := make(map[string]interface{}) + if frddp.BindingType != "" { + objectMap["bindingType"] = frddp.BindingType + } + return json.Marshal(objectMap) +} + +// AsAzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for FunctionRetrieveDefaultDefinitionParameters. +func (frddp FunctionRetrieveDefaultDefinitionParameters) AsAzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters() (*AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters, bool) { + return nil, false +} + +// AsJavaScriptFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for FunctionRetrieveDefaultDefinitionParameters. 
+func (frddp FunctionRetrieveDefaultDefinitionParameters) AsJavaScriptFunctionRetrieveDefaultDefinitionParameters() (*JavaScriptFunctionRetrieveDefaultDefinitionParameters, bool) { + return nil, false +} + +// AsFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for FunctionRetrieveDefaultDefinitionParameters. +func (frddp FunctionRetrieveDefaultDefinitionParameters) AsFunctionRetrieveDefaultDefinitionParameters() (*FunctionRetrieveDefaultDefinitionParameters, bool) { + return &frddp, true +} + +// AsBasicFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for FunctionRetrieveDefaultDefinitionParameters. +func (frddp FunctionRetrieveDefaultDefinitionParameters) AsBasicFunctionRetrieveDefaultDefinitionParameters() (BasicFunctionRetrieveDefaultDefinitionParameters, bool) { + return &frddp, true +} + +// FunctionsTestFuture an abstraction for monitoring and retrieving the results of a long-running +// operation. +type FunctionsTestFuture struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. 
+func (future *FunctionsTestFuture) Result(client FunctionsClient) (rts ResourceTestStatus, err error) { + var done bool + done, err = future.Done(client) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsTestFuture", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("streamanalytics.FunctionsTestFuture") + return + } + sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) + if rts.Response.Response, err = future.GetResult(sender); err == nil && rts.Response.Response.StatusCode != http.StatusNoContent { + rts, err = client.TestResponder(rts.Response.Response) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsTestFuture", "Result", rts.Response.Response, "Failure responding to request") + } + } + return +} + +// Input an input object, containing all information associated with the named input. All inputs are +// contained under a streaming job. +type Input struct { + autorest.Response `json:"-"` + // Properties - The properties that are associated with an input. Required on PUT (CreateOrReplace) requests. + Properties BasicInputProperties `json:"properties,omitempty"` + // ID - Resource Id + ID *string `json:"id,omitempty"` + // Name - Resource name + Name *string `json:"name,omitempty"` + // Type - Resource type + Type *string `json:"type,omitempty"` +} + +// UnmarshalJSON is the custom unmarshaler for Input struct. 
+func (i *Input) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + properties, err := unmarshalBasicInputProperties(*v) + if err != nil { + return err + } + i.Properties = properties + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + i.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + i.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + i.Type = &typeVar + } + } + } + + return nil +} + +// InputListResult object containing a list of inputs under a streaming job. +type InputListResult struct { + autorest.Response `json:"-"` + // Value - A list of inputs under a streaming job. Populated by a 'List' operation. + Value *[]Input `json:"value,omitempty"` + // NextLink - The link (url) to the next page of results. + NextLink *string `json:"nextLink,omitempty"` +} + +// InputListResultIterator provides access to a complete listing of Input values. +type InputListResultIterator struct { + i int + page InputListResultPage +} + +// NextWithContext advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. 
+func (iter *InputListResultIterator) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/InputListResultIterator.NextWithContext") + defer func() { + sc := -1 + if iter.Response().Response.Response != nil { + sc = iter.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err = iter.page.NextWithContext(ctx) + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (iter *InputListResultIterator) Next() error { + return iter.NextWithContext(context.Background()) +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter InputListResultIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter InputListResultIterator) Response() InputListResult { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter InputListResultIterator) Value() Input { + if !iter.page.NotDone() { + return Input{} + } + return iter.page.Values()[iter.i] +} + +// Creates a new instance of the InputListResultIterator type. +func NewInputListResultIterator(page InputListResultPage) InputListResultIterator { + return InputListResultIterator{page: page} +} + +// IsEmpty returns true if the ListResult contains no values. +func (ilr InputListResult) IsEmpty() bool { + return ilr.Value == nil || len(*ilr.Value) == 0 +} + +// inputListResultPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. 
+func (ilr InputListResult) inputListResultPreparer(ctx context.Context) (*http.Request, error) { + if ilr.NextLink == nil || len(to.String(ilr.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare((&http.Request{}).WithContext(ctx), + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(ilr.NextLink))) +} + +// InputListResultPage contains a page of Input values. +type InputListResultPage struct { + fn func(context.Context, InputListResult) (InputListResult, error) + ilr InputListResult +} + +// NextWithContext advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *InputListResultPage) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/InputListResultPage.NextWithContext") + defer func() { + sc := -1 + if page.Response().Response.Response != nil { + sc = page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + next, err := page.fn(ctx, page.ilr) + if err != nil { + return err + } + page.ilr = next + return nil +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (page *InputListResultPage) Next() error { + return page.NextWithContext(context.Background()) +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page InputListResultPage) NotDone() bool { + return !page.ilr.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page InputListResultPage) Response() InputListResult { + return page.ilr +} + +// Values returns the slice of values for the current page or nil if there are no values. 
+func (page InputListResultPage) Values() []Input { + if page.ilr.IsEmpty() { + return nil + } + return *page.ilr.Value +} + +// Creates a new instance of the InputListResultPage type. +func NewInputListResultPage(getNextPage func(context.Context, InputListResult) (InputListResult, error)) InputListResultPage { + return InputListResultPage{fn: getNextPage} +} + +// BasicInputProperties the properties that are associated with an input. +type BasicInputProperties interface { + AsReferenceInputProperties() (*ReferenceInputProperties, bool) + AsStreamInputProperties() (*StreamInputProperties, bool) + AsInputProperties() (*InputProperties, bool) +} + +// InputProperties the properties that are associated with an input. +type InputProperties struct { + // Serialization - Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests. + Serialization BasicSerialization `json:"serialization,omitempty"` + // Diagnostics - Describes conditions applicable to the Input, Output, or the job overall, that warrant customer attention. + Diagnostics *Diagnostics `json:"diagnostics,omitempty"` + // Etag - The current entity tag for the input. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. 
+ Etag *string `json:"etag,omitempty"` + // Type - Possible values include: 'TypeInputProperties', 'TypeReference', 'TypeStream' + Type TypeBasicInputProperties `json:"type,omitempty"` +} + +func unmarshalBasicInputProperties(body []byte) (BasicInputProperties, error) { + var m map[string]interface{} + err := json.Unmarshal(body, &m) + if err != nil { + return nil, err + } + + switch m["type"] { + case string(TypeReference): + var rip ReferenceInputProperties + err := json.Unmarshal(body, &rip) + return rip, err + case string(TypeStream): + var sip StreamInputProperties + err := json.Unmarshal(body, &sip) + return sip, err + default: + var IP InputProperties + err := json.Unmarshal(body, &IP) + return IP, err + } +} +func unmarshalBasicInputPropertiesArray(body []byte) ([]BasicInputProperties, error) { + var rawMessages []*json.RawMessage + err := json.Unmarshal(body, &rawMessages) + if err != nil { + return nil, err + } + + IPArray := make([]BasicInputProperties, len(rawMessages)) + + for index, rawMessage := range rawMessages { + IP, err := unmarshalBasicInputProperties(*rawMessage) + if err != nil { + return nil, err + } + IPArray[index] = IP + } + return IPArray, nil +} + +// MarshalJSON is the custom marshaler for InputProperties. +func (IP InputProperties) MarshalJSON() ([]byte, error) { + IP.Type = TypeInputProperties + objectMap := make(map[string]interface{}) + objectMap["serialization"] = IP.Serialization + if IP.Diagnostics != nil { + objectMap["diagnostics"] = IP.Diagnostics + } + if IP.Etag != nil { + objectMap["etag"] = IP.Etag + } + if IP.Type != "" { + objectMap["type"] = IP.Type + } + return json.Marshal(objectMap) +} + +// AsReferenceInputProperties is the BasicInputProperties implementation for InputProperties. +func (IP InputProperties) AsReferenceInputProperties() (*ReferenceInputProperties, bool) { + return nil, false +} + +// AsStreamInputProperties is the BasicInputProperties implementation for InputProperties. 
+func (IP InputProperties) AsStreamInputProperties() (*StreamInputProperties, bool) { + return nil, false +} + +// AsInputProperties is the BasicInputProperties implementation for InputProperties. +func (IP InputProperties) AsInputProperties() (*InputProperties, bool) { + return &IP, true +} + +// AsBasicInputProperties is the BasicInputProperties implementation for InputProperties. +func (IP InputProperties) AsBasicInputProperties() (BasicInputProperties, bool) { + return &IP, true +} + +// UnmarshalJSON is the custom unmarshaler for InputProperties struct. +func (IP *InputProperties) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "serialization": + if v != nil { + serialization, err := unmarshalBasicSerialization(*v) + if err != nil { + return err + } + IP.Serialization = serialization + } + case "diagnostics": + if v != nil { + var diagnostics Diagnostics + err = json.Unmarshal(*v, &diagnostics) + if err != nil { + return err + } + IP.Diagnostics = &diagnostics + } + case "etag": + if v != nil { + var etag string + err = json.Unmarshal(*v, &etag) + if err != nil { + return err + } + IP.Etag = &etag + } + case "type": + if v != nil { + var typeVar TypeBasicInputProperties + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + IP.Type = typeVar + } + } + } + + return nil +} + +// InputsTestFuture an abstraction for monitoring and retrieving the results of a long-running operation. +type InputsTestFuture struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. 
+func (future *InputsTestFuture) Result(client InputsClient) (rts ResourceTestStatus, err error) { + var done bool + done, err = future.Done(client) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.InputsTestFuture", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("streamanalytics.InputsTestFuture") + return + } + sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) + if rts.Response.Response, err = future.GetResult(sender); err == nil && rts.Response.Response.StatusCode != http.StatusNoContent { + rts, err = client.TestResponder(rts.Response.Response) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.InputsTestFuture", "Result", rts.Response.Response, "Failure responding to request") + } + } + return +} + +// IoTHubStreamInputDataSource describes an IoT Hub input data source that contains stream data. +type IoTHubStreamInputDataSource struct { + // IoTHubStreamInputDataSourceProperties - The properties that are associated with an IoT Hub input containing stream data. Required on PUT (CreateOrReplace) requests. + *IoTHubStreamInputDataSourceProperties `json:"properties,omitempty"` + // Type - Possible values include: 'TypeBasicStreamInputDataSourceTypeStreamInputDataSource', 'TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs', 'TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob' + Type TypeBasicStreamInputDataSource `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for IoTHubStreamInputDataSource. 
+func (ithsids IoTHubStreamInputDataSource) MarshalJSON() ([]byte, error) { + ithsids.Type = TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs + objectMap := make(map[string]interface{}) + if ithsids.IoTHubStreamInputDataSourceProperties != nil { + objectMap["properties"] = ithsids.IoTHubStreamInputDataSourceProperties + } + if ithsids.Type != "" { + objectMap["type"] = ithsids.Type + } + return json.Marshal(objectMap) +} + +// AsIoTHubStreamInputDataSource is the BasicStreamInputDataSource implementation for IoTHubStreamInputDataSource. +func (ithsids IoTHubStreamInputDataSource) AsIoTHubStreamInputDataSource() (*IoTHubStreamInputDataSource, bool) { + return &ithsids, true +} + +// AsEventHubStreamInputDataSource is the BasicStreamInputDataSource implementation for IoTHubStreamInputDataSource. +func (ithsids IoTHubStreamInputDataSource) AsEventHubStreamInputDataSource() (*EventHubStreamInputDataSource, bool) { + return nil, false +} + +// AsBlobStreamInputDataSource is the BasicStreamInputDataSource implementation for IoTHubStreamInputDataSource. +func (ithsids IoTHubStreamInputDataSource) AsBlobStreamInputDataSource() (*BlobStreamInputDataSource, bool) { + return nil, false +} + +// AsStreamInputDataSource is the BasicStreamInputDataSource implementation for IoTHubStreamInputDataSource. +func (ithsids IoTHubStreamInputDataSource) AsStreamInputDataSource() (*StreamInputDataSource, bool) { + return nil, false +} + +// AsBasicStreamInputDataSource is the BasicStreamInputDataSource implementation for IoTHubStreamInputDataSource. +func (ithsids IoTHubStreamInputDataSource) AsBasicStreamInputDataSource() (BasicStreamInputDataSource, bool) { + return &ithsids, true +} + +// UnmarshalJSON is the custom unmarshaler for IoTHubStreamInputDataSource struct. 
+func (ithsids *IoTHubStreamInputDataSource) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var ioTHubStreamInputDataSourceProperties IoTHubStreamInputDataSourceProperties + err = json.Unmarshal(*v, &ioTHubStreamInputDataSourceProperties) + if err != nil { + return err + } + ithsids.IoTHubStreamInputDataSourceProperties = &ioTHubStreamInputDataSourceProperties + } + case "type": + if v != nil { + var typeVar TypeBasicStreamInputDataSource + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + ithsids.Type = typeVar + } + } + } + + return nil +} + +// IoTHubStreamInputDataSourceProperties the properties that are associated with a IoT Hub input containing +// stream data. +type IoTHubStreamInputDataSourceProperties struct { + // IotHubNamespace - The name or the URI of the IoT Hub. Required on PUT (CreateOrReplace) requests. + IotHubNamespace *string `json:"iotHubNamespace,omitempty"` + // SharedAccessPolicyName - The shared access policy name for the IoT Hub. This policy must contain at least the Service connect permission. Required on PUT (CreateOrReplace) requests. + SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` + // SharedAccessPolicyKey - The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. + SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` + // ConsumerGroupName - The name of an IoT Hub Consumer Group that should be used to read events from the IoT Hub. If not specified, the input uses the Iot Hub’s default consumer group. + ConsumerGroupName *string `json:"consumerGroupName,omitempty"` + // Endpoint - The IoT Hub endpoint to connect to (ie. messages/events, messages/operationsMonitoringEvents, etc.). 
+ Endpoint *string `json:"endpoint,omitempty"` +} + +// JavaScriptFunctionBinding the binding to a JavaScript function. +type JavaScriptFunctionBinding struct { + // JavaScriptFunctionBindingProperties - The binding properties associated with a JavaScript function. + *JavaScriptFunctionBindingProperties `json:"properties,omitempty"` + // Type - Possible values include: 'TypeFunctionBinding', 'TypeMicrosoftStreamAnalyticsJavascriptUdf', 'TypeMicrosoftMachineLearningWebService' + Type TypeBasicFunctionBinding `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for JavaScriptFunctionBinding. +func (jsfb JavaScriptFunctionBinding) MarshalJSON() ([]byte, error) { + jsfb.Type = TypeMicrosoftStreamAnalyticsJavascriptUdf + objectMap := make(map[string]interface{}) + if jsfb.JavaScriptFunctionBindingProperties != nil { + objectMap["properties"] = jsfb.JavaScriptFunctionBindingProperties + } + if jsfb.Type != "" { + objectMap["type"] = jsfb.Type + } + return json.Marshal(objectMap) +} + +// AsJavaScriptFunctionBinding is the BasicFunctionBinding implementation for JavaScriptFunctionBinding. +func (jsfb JavaScriptFunctionBinding) AsJavaScriptFunctionBinding() (*JavaScriptFunctionBinding, bool) { + return &jsfb, true +} + +// AsAzureMachineLearningWebServiceFunctionBinding is the BasicFunctionBinding implementation for JavaScriptFunctionBinding. +func (jsfb JavaScriptFunctionBinding) AsAzureMachineLearningWebServiceFunctionBinding() (*AzureMachineLearningWebServiceFunctionBinding, bool) { + return nil, false +} + +// AsFunctionBinding is the BasicFunctionBinding implementation for JavaScriptFunctionBinding. +func (jsfb JavaScriptFunctionBinding) AsFunctionBinding() (*FunctionBinding, bool) { + return nil, false +} + +// AsBasicFunctionBinding is the BasicFunctionBinding implementation for JavaScriptFunctionBinding. 
+func (jsfb JavaScriptFunctionBinding) AsBasicFunctionBinding() (BasicFunctionBinding, bool) { + return &jsfb, true +} + +// UnmarshalJSON is the custom unmarshaler for JavaScriptFunctionBinding struct. +func (jsfb *JavaScriptFunctionBinding) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var javaScriptFunctionBindingProperties JavaScriptFunctionBindingProperties + err = json.Unmarshal(*v, &javaScriptFunctionBindingProperties) + if err != nil { + return err + } + jsfb.JavaScriptFunctionBindingProperties = &javaScriptFunctionBindingProperties + } + case "type": + if v != nil { + var typeVar TypeBasicFunctionBinding + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + jsfb.Type = typeVar + } + } + } + + return nil +} + +// JavaScriptFunctionBindingProperties the binding properties associated with a JavaScript function. +type JavaScriptFunctionBindingProperties struct { + // Script - The JavaScript code containing a single function definition. For example: 'function (x, y) { return x + y; }' + Script *string `json:"script,omitempty"` +} + +// JavaScriptFunctionBindingRetrievalProperties the binding retrieval properties associated with a +// JavaScript function. +type JavaScriptFunctionBindingRetrievalProperties struct { + // Script - The JavaScript code containing a single function definition. For example: 'function (x, y) { return x + y; }'. + Script *string `json:"script,omitempty"` + // UdfType - The function type. Possible values include: 'Scalar' + UdfType UdfType `json:"udfType,omitempty"` +} + +// JavaScriptFunctionRetrieveDefaultDefinitionParameters the parameters needed to retrieve the default +// function definition for a JavaScript function. 
+type JavaScriptFunctionRetrieveDefaultDefinitionParameters struct { + // JavaScriptFunctionBindingRetrievalProperties - The binding retrieval properties associated with a JavaScript function. + *JavaScriptFunctionBindingRetrievalProperties `json:"bindingRetrievalProperties,omitempty"` + // BindingType - Possible values include: 'BindingTypeFunctionRetrieveDefaultDefinitionParameters', 'BindingTypeMicrosoftMachineLearningWebService', 'BindingTypeMicrosoftStreamAnalyticsJavascriptUdf' + BindingType BindingType `json:"bindingType,omitempty"` +} + +// MarshalJSON is the custom marshaler for JavaScriptFunctionRetrieveDefaultDefinitionParameters. +func (jsfrddp JavaScriptFunctionRetrieveDefaultDefinitionParameters) MarshalJSON() ([]byte, error) { + jsfrddp.BindingType = BindingTypeMicrosoftStreamAnalyticsJavascriptUdf + objectMap := make(map[string]interface{}) + if jsfrddp.JavaScriptFunctionBindingRetrievalProperties != nil { + objectMap["bindingRetrievalProperties"] = jsfrddp.JavaScriptFunctionBindingRetrievalProperties + } + if jsfrddp.BindingType != "" { + objectMap["bindingType"] = jsfrddp.BindingType + } + return json.Marshal(objectMap) +} + +// AsAzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for JavaScriptFunctionRetrieveDefaultDefinitionParameters. +func (jsfrddp JavaScriptFunctionRetrieveDefaultDefinitionParameters) AsAzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters() (*AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters, bool) { + return nil, false +} + +// AsJavaScriptFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for JavaScriptFunctionRetrieveDefaultDefinitionParameters. 
+func (jsfrddp JavaScriptFunctionRetrieveDefaultDefinitionParameters) AsJavaScriptFunctionRetrieveDefaultDefinitionParameters() (*JavaScriptFunctionRetrieveDefaultDefinitionParameters, bool) { + return &jsfrddp, true +} + +// AsFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for JavaScriptFunctionRetrieveDefaultDefinitionParameters. +func (jsfrddp JavaScriptFunctionRetrieveDefaultDefinitionParameters) AsFunctionRetrieveDefaultDefinitionParameters() (*FunctionRetrieveDefaultDefinitionParameters, bool) { + return nil, false +} + +// AsBasicFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for JavaScriptFunctionRetrieveDefaultDefinitionParameters. +func (jsfrddp JavaScriptFunctionRetrieveDefaultDefinitionParameters) AsBasicFunctionRetrieveDefaultDefinitionParameters() (BasicFunctionRetrieveDefaultDefinitionParameters, bool) { + return &jsfrddp, true +} + +// UnmarshalJSON is the custom unmarshaler for JavaScriptFunctionRetrieveDefaultDefinitionParameters struct. 
+func (jsfrddp *JavaScriptFunctionRetrieveDefaultDefinitionParameters) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "bindingRetrievalProperties": + if v != nil { + var javaScriptFunctionBindingRetrievalProperties JavaScriptFunctionBindingRetrievalProperties + err = json.Unmarshal(*v, &javaScriptFunctionBindingRetrievalProperties) + if err != nil { + return err + } + jsfrddp.JavaScriptFunctionBindingRetrievalProperties = &javaScriptFunctionBindingRetrievalProperties + } + case "bindingType": + if v != nil { + var bindingType BindingType + err = json.Unmarshal(*v, &bindingType) + if err != nil { + return err + } + jsfrddp.BindingType = bindingType + } + } + } + + return nil +} + +// JSONSerialization describes how data from an input is serialized or how data is serialized when written +// to an output in JSON format. +type JSONSerialization struct { + // JSONSerializationProperties - The properties that are associated with the JSON serialization type. Required on PUT (CreateOrReplace) requests. + *JSONSerializationProperties `json:"properties,omitempty"` + // Type - Possible values include: 'TypeSerialization', 'TypeAvro', 'TypeJSON', 'TypeCsv' + Type Type `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for JSONSerialization. +func (js JSONSerialization) MarshalJSON() ([]byte, error) { + js.Type = TypeJSON + objectMap := make(map[string]interface{}) + if js.JSONSerializationProperties != nil { + objectMap["properties"] = js.JSONSerializationProperties + } + if js.Type != "" { + objectMap["type"] = js.Type + } + return json.Marshal(objectMap) +} + +// AsAvroSerialization is the BasicSerialization implementation for JSONSerialization. 
+func (js JSONSerialization) AsAvroSerialization() (*AvroSerialization, bool) { + return nil, false +} + +// AsJSONSerialization is the BasicSerialization implementation for JSONSerialization. +func (js JSONSerialization) AsJSONSerialization() (*JSONSerialization, bool) { + return &js, true +} + +// AsCsvSerialization is the BasicSerialization implementation for JSONSerialization. +func (js JSONSerialization) AsCsvSerialization() (*CsvSerialization, bool) { + return nil, false +} + +// AsSerialization is the BasicSerialization implementation for JSONSerialization. +func (js JSONSerialization) AsSerialization() (*Serialization, bool) { + return nil, false +} + +// AsBasicSerialization is the BasicSerialization implementation for JSONSerialization. +func (js JSONSerialization) AsBasicSerialization() (BasicSerialization, bool) { + return &js, true +} + +// UnmarshalJSON is the custom unmarshaler for JSONSerialization struct. +func (js *JSONSerialization) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var JSONSerializationProperties JSONSerializationProperties + err = json.Unmarshal(*v, &JSONSerializationProperties) + if err != nil { + return err + } + js.JSONSerializationProperties = &JSONSerializationProperties + } + case "type": + if v != nil { + var typeVar Type + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + js.Type = typeVar + } + } + } + + return nil +} + +// JSONSerializationProperties the properties that are associated with the JSON serialization type. +type JSONSerializationProperties struct { + // Encoding - Specifies the encoding of the incoming data in the case of input and the encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. 
Possible values include: 'UTF8' + Encoding Encoding `json:"encoding,omitempty"` + // Format - This property only applies to JSON serialization of outputs only. It is not applicable to inputs. This property specifies the format of the JSON the output will be written in. The currently supported values are 'lineSeparated' indicating the output will be formatted by having each JSON object separated by a new line and 'array' indicating the output will be formatted as an array of JSON objects. Default value is 'lineSeparated' if left null. Possible values include: 'LineSeparated', 'Array' + Format JSONOutputSerializationFormat `json:"format,omitempty"` +} + +// OAuthBasedDataSourceProperties the properties that are associated with data sources that use OAuth as +// their authentication model. +type OAuthBasedDataSourceProperties struct { + // RefreshToken - A refresh token that can be used to obtain a valid access token that can then be used to authenticate with the data source. A valid refresh token is currently only obtainable via the Azure Portal. It is recommended to put a dummy string value here when creating the data source and then going to the Azure Portal to authenticate the data source which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) requests. + RefreshToken *string `json:"refreshToken,omitempty"` + // TokenUserPrincipalName - The user principal name (UPN) of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token. + TokenUserPrincipalName *string `json:"tokenUserPrincipalName,omitempty"` + // TokenUserDisplayName - The user display name of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token. 
+ TokenUserDisplayName *string `json:"tokenUserDisplayName,omitempty"` +} + +// Operation a Stream Analytics REST API operation +type Operation struct { + // Name - The name of the operation being performed on this particular object. + Name *string `json:"name,omitempty"` + // Display - Contains the localized display information for this particular operation / action. + Display *OperationDisplay `json:"display,omitempty"` +} + +// OperationDisplay contains the localized display information for this particular operation / action. +type OperationDisplay struct { + // Provider - The localized friendly form of the resource provider name. + Provider *string `json:"provider,omitempty"` + // Resource - The localized friendly form of the resource type related to this action/operation. + Resource *string `json:"resource,omitempty"` + // Operation - The localized friendly name for the operation. + Operation *string `json:"operation,omitempty"` + // Description - The localized friendly description for the operation. + Description *string `json:"description,omitempty"` +} + +// OperationListResult result of the request to list Stream Analytics operations. It contains a list of +// operations and a URL link to get the next set of results. +type OperationListResult struct { + autorest.Response `json:"-"` + // Value - List of Stream Analytics operations supported by the Microsoft.StreamAnalytics resource provider. + Value *[]Operation `json:"value,omitempty"` + // NextLink - URL to get the next set of operation list results if there are any. + NextLink *string `json:"nextLink,omitempty"` +} + +// OperationListResultIterator provides access to a complete listing of Operation values. +type OperationListResultIterator struct { + i int + page OperationListResultPage +} + +// NextWithContext advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. 
+func (iter *OperationListResultIterator) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OperationListResultIterator.NextWithContext") + defer func() { + sc := -1 + if iter.Response().Response.Response != nil { + sc = iter.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err = iter.page.NextWithContext(ctx) + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (iter *OperationListResultIterator) Next() error { + return iter.NextWithContext(context.Background()) +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter OperationListResultIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter OperationListResultIterator) Response() OperationListResult { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter OperationListResultIterator) Value() Operation { + if !iter.page.NotDone() { + return Operation{} + } + return iter.page.Values()[iter.i] +} + +// Creates a new instance of the OperationListResultIterator type. +func NewOperationListResultIterator(page OperationListResultPage) OperationListResultIterator { + return OperationListResultIterator{page: page} +} + +// IsEmpty returns true if the ListResult contains no values. 
+func (olr OperationListResult) IsEmpty() bool { + return olr.Value == nil || len(*olr.Value) == 0 +} + +// operationListResultPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (olr OperationListResult) operationListResultPreparer(ctx context.Context) (*http.Request, error) { + if olr.NextLink == nil || len(to.String(olr.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare((&http.Request{}).WithContext(ctx), + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(olr.NextLink))) +} + +// OperationListResultPage contains a page of Operation values. +type OperationListResultPage struct { + fn func(context.Context, OperationListResult) (OperationListResult, error) + olr OperationListResult +} + +// NextWithContext advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *OperationListResultPage) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OperationListResultPage.NextWithContext") + defer func() { + sc := -1 + if page.Response().Response.Response != nil { + sc = page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + next, err := page.fn(ctx, page.olr) + if err != nil { + return err + } + page.olr = next + return nil +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (page *OperationListResultPage) Next() error { + return page.NextWithContext(context.Background()) +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page OperationListResultPage) NotDone() bool { + return !page.olr.IsEmpty() +} + +// Response returns the raw server response from the last page request. 
+func (page OperationListResultPage) Response() OperationListResult { + return page.olr +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page OperationListResultPage) Values() []Operation { + if page.olr.IsEmpty() { + return nil + } + return *page.olr.Value +} + +// Creates a new instance of the OperationListResultPage type. +func NewOperationListResultPage(getNextPage func(context.Context, OperationListResult) (OperationListResult, error)) OperationListResultPage { + return OperationListResultPage{fn: getNextPage} +} + +// Output an output object, containing all information associated with the named output. All outputs are +// contained under a streaming job. +type Output struct { + autorest.Response `json:"-"` + // OutputProperties - The properties that are associated with an output. Required on PUT (CreateOrReplace) requests. + *OutputProperties `json:"properties,omitempty"` + // ID - Resource Id + ID *string `json:"id,omitempty"` + // Name - Resource name + Name *string `json:"name,omitempty"` + // Type - Resource type + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for Output. +func (o Output) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if o.OutputProperties != nil { + objectMap["properties"] = o.OutputProperties + } + if o.ID != nil { + objectMap["id"] = o.ID + } + if o.Name != nil { + objectMap["name"] = o.Name + } + if o.Type != nil { + objectMap["type"] = o.Type + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for Output struct. 
+func (o *Output) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var outputProperties OutputProperties + err = json.Unmarshal(*v, &outputProperties) + if err != nil { + return err + } + o.OutputProperties = &outputProperties + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + o.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + o.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + o.Type = &typeVar + } + } + } + + return nil +} + +// BasicOutputDataSource describes the data source that output will be written to. +type BasicOutputDataSource interface { + AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) + AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) + AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) + AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) + AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) + AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) + AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) + AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) + AsBlobOutputDataSource() (*BlobOutputDataSource, bool) + AsOutputDataSource() (*OutputDataSource, bool) +} + +// OutputDataSource describes the data source that output will be written to. 
+type OutputDataSource struct { + // Type - Possible values include: 'TypeOutputDataSource', 'TypeMicrosoftDataLakeAccounts', 'TypePowerBI', 'TypeMicrosoftServiceBusTopic', 'TypeMicrosoftServiceBusQueue', 'TypeMicrosoftStorageDocumentDB', 'TypeMicrosoftSQLServerDatabase', 'TypeMicrosoftServiceBusEventHub', 'TypeMicrosoftStorageTable', 'TypeMicrosoftStorageBlob' + Type TypeBasicOutputDataSource `json:"type,omitempty"` +} + +func unmarshalBasicOutputDataSource(body []byte) (BasicOutputDataSource, error) { + var m map[string]interface{} + err := json.Unmarshal(body, &m) + if err != nil { + return nil, err + } + + switch m["type"] { + case string(TypeMicrosoftDataLakeAccounts): + var adlsods AzureDataLakeStoreOutputDataSource + err := json.Unmarshal(body, &adlsods) + return adlsods, err + case string(TypePowerBI): + var pbods PowerBIOutputDataSource + err := json.Unmarshal(body, &pbods) + return pbods, err + case string(TypeMicrosoftServiceBusTopic): + var sbtods ServiceBusTopicOutputDataSource + err := json.Unmarshal(body, &sbtods) + return sbtods, err + case string(TypeMicrosoftServiceBusQueue): + var sbqods ServiceBusQueueOutputDataSource + err := json.Unmarshal(body, &sbqods) + return sbqods, err + case string(TypeMicrosoftStorageDocumentDB): + var ddods DocumentDbOutputDataSource + err := json.Unmarshal(body, &ddods) + return ddods, err + case string(TypeMicrosoftSQLServerDatabase): + var asdods AzureSQLDatabaseOutputDataSource + err := json.Unmarshal(body, &asdods) + return asdods, err + case string(TypeMicrosoftServiceBusEventHub): + var ehods EventHubOutputDataSource + err := json.Unmarshal(body, &ehods) + return ehods, err + case string(TypeMicrosoftStorageTable): + var atods AzureTableOutputDataSource + err := json.Unmarshal(body, &atods) + return atods, err + case string(TypeMicrosoftStorageBlob): + var bods BlobOutputDataSource + err := json.Unmarshal(body, &bods) + return bods, err + default: + var ods OutputDataSource + err := json.Unmarshal(body, &ods) + 
return ods, err + } +} +func unmarshalBasicOutputDataSourceArray(body []byte) ([]BasicOutputDataSource, error) { + var rawMessages []*json.RawMessage + err := json.Unmarshal(body, &rawMessages) + if err != nil { + return nil, err + } + + odsArray := make([]BasicOutputDataSource, len(rawMessages)) + + for index, rawMessage := range rawMessages { + ods, err := unmarshalBasicOutputDataSource(*rawMessage) + if err != nil { + return nil, err + } + odsArray[index] = ods + } + return odsArray, nil +} + +// MarshalJSON is the custom marshaler for OutputDataSource. +func (ods OutputDataSource) MarshalJSON() ([]byte, error) { + ods.Type = TypeOutputDataSource + objectMap := make(map[string]interface{}) + if ods.Type != "" { + objectMap["type"] = ods.Type + } + return json.Marshal(objectMap) +} + +// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. +func (ods OutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { + return nil, false +} + +// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. +func (ods OutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. +func (ods OutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. +func (ods OutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { + return nil, false +} + +// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. 
+func (ods OutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { + return nil, false +} + +// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. +func (ods OutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { + return nil, false +} + +// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. +func (ods OutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { + return nil, false +} + +// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. +func (ods OutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { + return nil, false +} + +// AsBlobOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. +func (ods OutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { + return nil, false +} + +// AsOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. +func (ods OutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { + return &ods, true +} + +// AsBasicOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. +func (ods OutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { + return &ods, true +} + +// OutputListResult object containing a list of outputs under a streaming job. +type OutputListResult struct { + autorest.Response `json:"-"` + // Value - A list of outputs under a streaming job. Populated by a 'List' operation. + Value *[]Output `json:"value,omitempty"` + // NextLink - The link (url) to the next page of results. + NextLink *string `json:"nextLink,omitempty"` +} + +// OutputListResultIterator provides access to a complete listing of Output values. 
+type OutputListResultIterator struct { + i int + page OutputListResultPage +} + +// NextWithContext advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +func (iter *OutputListResultIterator) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OutputListResultIterator.NextWithContext") + defer func() { + sc := -1 + if iter.Response().Response.Response != nil { + sc = iter.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err = iter.page.NextWithContext(ctx) + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (iter *OutputListResultIterator) Next() error { + return iter.NextWithContext(context.Background()) +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter OutputListResultIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter OutputListResultIterator) Response() OutputListResult { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter OutputListResultIterator) Value() Output { + if !iter.page.NotDone() { + return Output{} + } + return iter.page.Values()[iter.i] +} + +// Creates a new instance of the OutputListResultIterator type. 
+func NewOutputListResultIterator(page OutputListResultPage) OutputListResultIterator { + return OutputListResultIterator{page: page} +} + +// IsEmpty returns true if the ListResult contains no values. +func (olr OutputListResult) IsEmpty() bool { + return olr.Value == nil || len(*olr.Value) == 0 +} + +// outputListResultPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (olr OutputListResult) outputListResultPreparer(ctx context.Context) (*http.Request, error) { + if olr.NextLink == nil || len(to.String(olr.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare((&http.Request{}).WithContext(ctx), + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(olr.NextLink))) +} + +// OutputListResultPage contains a page of Output values. +type OutputListResultPage struct { + fn func(context.Context, OutputListResult) (OutputListResult, error) + olr OutputListResult +} + +// NextWithContext advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *OutputListResultPage) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OutputListResultPage.NextWithContext") + defer func() { + sc := -1 + if page.Response().Response.Response != nil { + sc = page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + next, err := page.fn(ctx, page.olr) + if err != nil { + return err + } + page.olr = next + return nil +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (page *OutputListResultPage) Next() error { + return page.NextWithContext(context.Background()) +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. 
+func (page OutputListResultPage) NotDone() bool { + return !page.olr.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page OutputListResultPage) Response() OutputListResult { + return page.olr +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page OutputListResultPage) Values() []Output { + if page.olr.IsEmpty() { + return nil + } + return *page.olr.Value +} + +// Creates a new instance of the OutputListResultPage type. +func NewOutputListResultPage(getNextPage func(context.Context, OutputListResult) (OutputListResult, error)) OutputListResultPage { + return OutputListResultPage{fn: getNextPage} +} + +// OutputProperties the properties that are associated with an output. +type OutputProperties struct { + // Datasource - Describes the data source that output will be written to. Required on PUT (CreateOrReplace) requests. + Datasource BasicOutputDataSource `json:"datasource,omitempty"` + // Serialization - Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests. + Serialization BasicSerialization `json:"serialization,omitempty"` + // Diagnostics - Describes conditions applicable to the Input, Output, or the job overall, that warrant customer attention. + Diagnostics *Diagnostics `json:"diagnostics,omitempty"` + // Etag - The current entity tag for the output. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. + Etag *string `json:"etag,omitempty"` +} + +// UnmarshalJSON is the custom unmarshaler for OutputProperties struct. 
+func (op *OutputProperties) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "datasource": + if v != nil { + datasource, err := unmarshalBasicOutputDataSource(*v) + if err != nil { + return err + } + op.Datasource = datasource + } + case "serialization": + if v != nil { + serialization, err := unmarshalBasicSerialization(*v) + if err != nil { + return err + } + op.Serialization = serialization + } + case "diagnostics": + if v != nil { + var diagnostics Diagnostics + err = json.Unmarshal(*v, &diagnostics) + if err != nil { + return err + } + op.Diagnostics = &diagnostics + } + case "etag": + if v != nil { + var etag string + err = json.Unmarshal(*v, &etag) + if err != nil { + return err + } + op.Etag = &etag + } + } + } + + return nil +} + +// OutputsTestFuture an abstraction for monitoring and retrieving the results of a long-running operation. +type OutputsTestFuture struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. 
+func (future *OutputsTestFuture) Result(client OutputsClient) (rts ResourceTestStatus, err error) { + var done bool + done, err = future.Done(client) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsTestFuture", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("streamanalytics.OutputsTestFuture") + return + } + sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) + if rts.Response.Response, err = future.GetResult(sender); err == nil && rts.Response.Response.StatusCode != http.StatusNoContent { + rts, err = client.TestResponder(rts.Response.Response) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsTestFuture", "Result", rts.Response.Response, "Failure responding to request") + } + } + return +} + +// PowerBIOutputDataSource describes a Power BI output data source. +type PowerBIOutputDataSource struct { + // PowerBIOutputDataSourceProperties - The properties that are associated with a Power BI output. Required on PUT (CreateOrReplace) requests. + *PowerBIOutputDataSourceProperties `json:"properties,omitempty"` + // Type - Possible values include: 'TypeOutputDataSource', 'TypeMicrosoftDataLakeAccounts', 'TypePowerBI', 'TypeMicrosoftServiceBusTopic', 'TypeMicrosoftServiceBusQueue', 'TypeMicrosoftStorageDocumentDB', 'TypeMicrosoftSQLServerDatabase', 'TypeMicrosoftServiceBusEventHub', 'TypeMicrosoftStorageTable', 'TypeMicrosoftStorageBlob' + Type TypeBasicOutputDataSource `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for PowerBIOutputDataSource. 
+func (pbods PowerBIOutputDataSource) MarshalJSON() ([]byte, error) { + pbods.Type = TypePowerBI + objectMap := make(map[string]interface{}) + if pbods.PowerBIOutputDataSourceProperties != nil { + objectMap["properties"] = pbods.PowerBIOutputDataSourceProperties + } + if pbods.Type != "" { + objectMap["type"] = pbods.Type + } + return json.Marshal(objectMap) +} + +// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. +func (pbods PowerBIOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { + return nil, false +} + +// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. +func (pbods PowerBIOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { + return &pbods, true +} + +// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. +func (pbods PowerBIOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. +func (pbods PowerBIOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { + return nil, false +} + +// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. +func (pbods PowerBIOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { + return nil, false +} + +// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. +func (pbods PowerBIOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { + return nil, false +} + +// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. 
+func (pbods PowerBIOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { + return nil, false +} + +// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. +func (pbods PowerBIOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { + return nil, false +} + +// AsBlobOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. +func (pbods PowerBIOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { + return nil, false +} + +// AsOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. +func (pbods PowerBIOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { + return nil, false +} + +// AsBasicOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. +func (pbods PowerBIOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { + return &pbods, true +} + +// UnmarshalJSON is the custom unmarshaler for PowerBIOutputDataSource struct. +func (pbods *PowerBIOutputDataSource) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var powerBIOutputDataSourceProperties PowerBIOutputDataSourceProperties + err = json.Unmarshal(*v, &powerBIOutputDataSourceProperties) + if err != nil { + return err + } + pbods.PowerBIOutputDataSourceProperties = &powerBIOutputDataSourceProperties + } + case "type": + if v != nil { + var typeVar TypeBasicOutputDataSource + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + pbods.Type = typeVar + } + } + } + + return nil +} + +// PowerBIOutputDataSourceProperties the properties that are associated with a Power BI output. +type PowerBIOutputDataSourceProperties struct { + // Dataset - The name of the Power BI dataset. 
Required on PUT (CreateOrReplace) requests. + Dataset *string `json:"dataset,omitempty"` + // Table - The name of the Power BI table under the specified dataset. Required on PUT (CreateOrReplace) requests. + Table *string `json:"table,omitempty"` + // GroupID - The ID of the Power BI group. + GroupID *string `json:"groupId,omitempty"` + // GroupName - The name of the Power BI group. Use this property to help remember which specific Power BI group id was used. + GroupName *string `json:"groupName,omitempty"` + // RefreshToken - A refresh token that can be used to obtain a valid access token that can then be used to authenticate with the data source. A valid refresh token is currently only obtainable via the Azure Portal. It is recommended to put a dummy string value here when creating the data source and then going to the Azure Portal to authenticate the data source which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) requests. + RefreshToken *string `json:"refreshToken,omitempty"` + // TokenUserPrincipalName - The user principal name (UPN) of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token. + TokenUserPrincipalName *string `json:"tokenUserPrincipalName,omitempty"` + // TokenUserDisplayName - The user display name of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token. + TokenUserDisplayName *string `json:"tokenUserDisplayName,omitempty"` +} + +// BasicReferenceInputDataSource describes an input data source that contains reference data. +type BasicReferenceInputDataSource interface { + AsBlobReferenceInputDataSource() (*BlobReferenceInputDataSource, bool) + AsReferenceInputDataSource() (*ReferenceInputDataSource, bool) +} + +// ReferenceInputDataSource describes an input data source that contains reference data. 
+type ReferenceInputDataSource struct { + // Type - Possible values include: 'TypeBasicReferenceInputDataSourceTypeReferenceInputDataSource', 'TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob' + Type TypeBasicReferenceInputDataSource `json:"type,omitempty"` +} + +func unmarshalBasicReferenceInputDataSource(body []byte) (BasicReferenceInputDataSource, error) { + var m map[string]interface{} + err := json.Unmarshal(body, &m) + if err != nil { + return nil, err + } + + switch m["type"] { + case string(TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob): + var brids BlobReferenceInputDataSource + err := json.Unmarshal(body, &brids) + return brids, err + default: + var rids ReferenceInputDataSource + err := json.Unmarshal(body, &rids) + return rids, err + } +} +func unmarshalBasicReferenceInputDataSourceArray(body []byte) ([]BasicReferenceInputDataSource, error) { + var rawMessages []*json.RawMessage + err := json.Unmarshal(body, &rawMessages) + if err != nil { + return nil, err + } + + ridsArray := make([]BasicReferenceInputDataSource, len(rawMessages)) + + for index, rawMessage := range rawMessages { + rids, err := unmarshalBasicReferenceInputDataSource(*rawMessage) + if err != nil { + return nil, err + } + ridsArray[index] = rids + } + return ridsArray, nil +} + +// MarshalJSON is the custom marshaler for ReferenceInputDataSource. +func (rids ReferenceInputDataSource) MarshalJSON() ([]byte, error) { + rids.Type = TypeBasicReferenceInputDataSourceTypeReferenceInputDataSource + objectMap := make(map[string]interface{}) + if rids.Type != "" { + objectMap["type"] = rids.Type + } + return json.Marshal(objectMap) +} + +// AsBlobReferenceInputDataSource is the BasicReferenceInputDataSource implementation for ReferenceInputDataSource. 
+func (rids ReferenceInputDataSource) AsBlobReferenceInputDataSource() (*BlobReferenceInputDataSource, bool) { + return nil, false +} + +// AsReferenceInputDataSource is the BasicReferenceInputDataSource implementation for ReferenceInputDataSource. +func (rids ReferenceInputDataSource) AsReferenceInputDataSource() (*ReferenceInputDataSource, bool) { + return &rids, true +} + +// AsBasicReferenceInputDataSource is the BasicReferenceInputDataSource implementation for ReferenceInputDataSource. +func (rids ReferenceInputDataSource) AsBasicReferenceInputDataSource() (BasicReferenceInputDataSource, bool) { + return &rids, true +} + +// ReferenceInputProperties the properties that are associated with an input containing reference data. +type ReferenceInputProperties struct { + // Datasource - Describes an input data source that contains reference data. Required on PUT (CreateOrReplace) requests. + Datasource BasicReferenceInputDataSource `json:"datasource,omitempty"` + // Serialization - Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests. + Serialization BasicSerialization `json:"serialization,omitempty"` + // Diagnostics - Describes conditions applicable to the Input, Output, or the job overall, that warrant customer attention. + Diagnostics *Diagnostics `json:"diagnostics,omitempty"` + // Etag - The current entity tag for the input. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. + Etag *string `json:"etag,omitempty"` + // Type - Possible values include: 'TypeInputProperties', 'TypeReference', 'TypeStream' + Type TypeBasicInputProperties `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for ReferenceInputProperties. 
+func (rip ReferenceInputProperties) MarshalJSON() ([]byte, error) { + rip.Type = TypeReference + objectMap := make(map[string]interface{}) + objectMap["datasource"] = rip.Datasource + objectMap["serialization"] = rip.Serialization + if rip.Diagnostics != nil { + objectMap["diagnostics"] = rip.Diagnostics + } + if rip.Etag != nil { + objectMap["etag"] = rip.Etag + } + if rip.Type != "" { + objectMap["type"] = rip.Type + } + return json.Marshal(objectMap) +} + +// AsReferenceInputProperties is the BasicInputProperties implementation for ReferenceInputProperties. +func (rip ReferenceInputProperties) AsReferenceInputProperties() (*ReferenceInputProperties, bool) { + return &rip, true +} + +// AsStreamInputProperties is the BasicInputProperties implementation for ReferenceInputProperties. +func (rip ReferenceInputProperties) AsStreamInputProperties() (*StreamInputProperties, bool) { + return nil, false +} + +// AsInputProperties is the BasicInputProperties implementation for ReferenceInputProperties. +func (rip ReferenceInputProperties) AsInputProperties() (*InputProperties, bool) { + return nil, false +} + +// AsBasicInputProperties is the BasicInputProperties implementation for ReferenceInputProperties. +func (rip ReferenceInputProperties) AsBasicInputProperties() (BasicInputProperties, bool) { + return &rip, true +} + +// UnmarshalJSON is the custom unmarshaler for ReferenceInputProperties struct. 
+func (rip *ReferenceInputProperties) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "datasource": + if v != nil { + datasource, err := unmarshalBasicReferenceInputDataSource(*v) + if err != nil { + return err + } + rip.Datasource = datasource + } + case "serialization": + if v != nil { + serialization, err := unmarshalBasicSerialization(*v) + if err != nil { + return err + } + rip.Serialization = serialization + } + case "diagnostics": + if v != nil { + var diagnostics Diagnostics + err = json.Unmarshal(*v, &diagnostics) + if err != nil { + return err + } + rip.Diagnostics = &diagnostics + } + case "etag": + if v != nil { + var etag string + err = json.Unmarshal(*v, &etag) + if err != nil { + return err + } + rip.Etag = &etag + } + case "type": + if v != nil { + var typeVar TypeBasicInputProperties + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + rip.Type = typeVar + } + } + } + + return nil +} + +// Resource the base resource model definition. +type Resource struct { + // ID - Resource Id + ID *string `json:"id,omitempty"` + // Name - Resource name + Name *string `json:"name,omitempty"` + // Type - Resource type + Type *string `json:"type,omitempty"` + // Location - Resource location. Required on PUT (CreateOrReplace) requests. + Location *string `json:"location,omitempty"` + // Tags - Resource tags + Tags map[string]*string `json:"tags"` +} + +// MarshalJSON is the custom marshaler for Resource. 
+func (r Resource) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if r.ID != nil { + objectMap["id"] = r.ID + } + if r.Name != nil { + objectMap["name"] = r.Name + } + if r.Type != nil { + objectMap["type"] = r.Type + } + if r.Location != nil { + objectMap["location"] = r.Location + } + if r.Tags != nil { + objectMap["tags"] = r.Tags + } + return json.Marshal(objectMap) +} + +// ResourceTestStatus describes the status of the test operation along with error information, if +// applicable. +type ResourceTestStatus struct { + autorest.Response `json:"-"` + // Status - The status of the test operation. + Status *string `json:"status,omitempty"` + // Error - Describes the error that occurred. + Error *ErrorResponse `json:"error,omitempty"` +} + +// ScalarFunctionConfiguration describes the configuration of the scalar function. +type ScalarFunctionConfiguration struct { + // Inputs - A list of inputs describing the parameters of the function. + Inputs *[]FunctionInput `json:"inputs,omitempty"` + // Output - The output of the function. + Output *FunctionOutput `json:"output,omitempty"` + // Binding - The physical binding of the function. For example, in the Azure Machine Learning web service’s case, this describes the endpoint. + Binding BasicFunctionBinding `json:"binding,omitempty"` +} + +// UnmarshalJSON is the custom unmarshaler for ScalarFunctionConfiguration struct. 
+func (sfc *ScalarFunctionConfiguration) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "inputs": + if v != nil { + var inputs []FunctionInput + err = json.Unmarshal(*v, &inputs) + if err != nil { + return err + } + sfc.Inputs = &inputs + } + case "output": + if v != nil { + var output FunctionOutput + err = json.Unmarshal(*v, &output) + if err != nil { + return err + } + sfc.Output = &output + } + case "binding": + if v != nil { + binding, err := unmarshalBasicFunctionBinding(*v) + if err != nil { + return err + } + sfc.Binding = binding + } + } + } + + return nil +} + +// ScalarFunctionProperties the properties that are associated with a scalar function. +type ScalarFunctionProperties struct { + // ScalarFunctionConfiguration - Describes the configuration of the scalar function. + *ScalarFunctionConfiguration `json:"properties,omitempty"` + // Etag - The current entity tag for the function. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. + Etag *string `json:"etag,omitempty"` + // Type - Possible values include: 'TypeFunctionProperties', 'TypeScalar' + Type TypeBasicFunctionProperties `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for ScalarFunctionProperties. 
+func (sfp ScalarFunctionProperties) MarshalJSON() ([]byte, error) { + sfp.Type = TypeScalar + objectMap := make(map[string]interface{}) + if sfp.ScalarFunctionConfiguration != nil { + objectMap["properties"] = sfp.ScalarFunctionConfiguration + } + if sfp.Etag != nil { + objectMap["etag"] = sfp.Etag + } + if sfp.Type != "" { + objectMap["type"] = sfp.Type + } + return json.Marshal(objectMap) +} + +// AsScalarFunctionProperties is the BasicFunctionProperties implementation for ScalarFunctionProperties. +func (sfp ScalarFunctionProperties) AsScalarFunctionProperties() (*ScalarFunctionProperties, bool) { + return &sfp, true +} + +// AsFunctionProperties is the BasicFunctionProperties implementation for ScalarFunctionProperties. +func (sfp ScalarFunctionProperties) AsFunctionProperties() (*FunctionProperties, bool) { + return nil, false +} + +// AsBasicFunctionProperties is the BasicFunctionProperties implementation for ScalarFunctionProperties. +func (sfp ScalarFunctionProperties) AsBasicFunctionProperties() (BasicFunctionProperties, bool) { + return &sfp, true +} + +// UnmarshalJSON is the custom unmarshaler for ScalarFunctionProperties struct. 
+func (sfp *ScalarFunctionProperties) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var scalarFunctionConfiguration ScalarFunctionConfiguration + err = json.Unmarshal(*v, &scalarFunctionConfiguration) + if err != nil { + return err + } + sfp.ScalarFunctionConfiguration = &scalarFunctionConfiguration + } + case "etag": + if v != nil { + var etag string + err = json.Unmarshal(*v, &etag) + if err != nil { + return err + } + sfp.Etag = &etag + } + case "type": + if v != nil { + var typeVar TypeBasicFunctionProperties + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + sfp.Type = typeVar + } + } + } + + return nil +} + +// BasicSerialization describes how data from an input is serialized or how data is serialized when written to an +// output. +type BasicSerialization interface { + AsAvroSerialization() (*AvroSerialization, bool) + AsJSONSerialization() (*JSONSerialization, bool) + AsCsvSerialization() (*CsvSerialization, bool) + AsSerialization() (*Serialization, bool) +} + +// Serialization describes how data from an input is serialized or how data is serialized when written to an +// output. 
+type Serialization struct { + // Type - Possible values include: 'TypeSerialization', 'TypeAvro', 'TypeJSON', 'TypeCsv' + Type Type `json:"type,omitempty"` +} + +func unmarshalBasicSerialization(body []byte) (BasicSerialization, error) { + var m map[string]interface{} + err := json.Unmarshal(body, &m) + if err != nil { + return nil, err + } + + switch m["type"] { + case string(TypeAvro): + var as AvroSerialization + err := json.Unmarshal(body, &as) + return as, err + case string(TypeJSON): + var js JSONSerialization + err := json.Unmarshal(body, &js) + return js, err + case string(TypeCsv): + var cs CsvSerialization + err := json.Unmarshal(body, &cs) + return cs, err + default: + var s Serialization + err := json.Unmarshal(body, &s) + return s, err + } +} +func unmarshalBasicSerializationArray(body []byte) ([]BasicSerialization, error) { + var rawMessages []*json.RawMessage + err := json.Unmarshal(body, &rawMessages) + if err != nil { + return nil, err + } + + sArray := make([]BasicSerialization, len(rawMessages)) + + for index, rawMessage := range rawMessages { + s, err := unmarshalBasicSerialization(*rawMessage) + if err != nil { + return nil, err + } + sArray[index] = s + } + return sArray, nil +} + +// MarshalJSON is the custom marshaler for Serialization. +func (s Serialization) MarshalJSON() ([]byte, error) { + s.Type = TypeSerialization + objectMap := make(map[string]interface{}) + if s.Type != "" { + objectMap["type"] = s.Type + } + return json.Marshal(objectMap) +} + +// AsAvroSerialization is the BasicSerialization implementation for Serialization. +func (s Serialization) AsAvroSerialization() (*AvroSerialization, bool) { + return nil, false +} + +// AsJSONSerialization is the BasicSerialization implementation for Serialization. +func (s Serialization) AsJSONSerialization() (*JSONSerialization, bool) { + return nil, false +} + +// AsCsvSerialization is the BasicSerialization implementation for Serialization. 
+func (s Serialization) AsCsvSerialization() (*CsvSerialization, bool) { + return nil, false +} + +// AsSerialization is the BasicSerialization implementation for Serialization. +func (s Serialization) AsSerialization() (*Serialization, bool) { + return &s, true +} + +// AsBasicSerialization is the BasicSerialization implementation for Serialization. +func (s Serialization) AsBasicSerialization() (BasicSerialization, bool) { + return &s, true +} + +// ServiceBusDataSourceProperties the common properties that are associated with Service Bus data sources +// (Queues, Topics, Event Hubs, etc.). +type ServiceBusDataSourceProperties struct { + // ServiceBusNamespace - The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"` + // SharedAccessPolicyName - The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` + // SharedAccessPolicyKey - The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. + SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` +} + +// ServiceBusQueueOutputDataSource describes a Service Bus Queue output data source. +type ServiceBusQueueOutputDataSource struct { + // ServiceBusQueueOutputDataSourceProperties - The properties that are associated with a Service Bus Queue output. Required on PUT (CreateOrReplace) requests. 
+ *ServiceBusQueueOutputDataSourceProperties `json:"properties,omitempty"` + // Type - Possible values include: 'TypeOutputDataSource', 'TypeMicrosoftDataLakeAccounts', 'TypePowerBI', 'TypeMicrosoftServiceBusTopic', 'TypeMicrosoftServiceBusQueue', 'TypeMicrosoftStorageDocumentDB', 'TypeMicrosoftSQLServerDatabase', 'TypeMicrosoftServiceBusEventHub', 'TypeMicrosoftStorageTable', 'TypeMicrosoftStorageBlob' + Type TypeBasicOutputDataSource `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for ServiceBusQueueOutputDataSource. +func (sbqods ServiceBusQueueOutputDataSource) MarshalJSON() ([]byte, error) { + sbqods.Type = TypeMicrosoftServiceBusQueue + objectMap := make(map[string]interface{}) + if sbqods.ServiceBusQueueOutputDataSourceProperties != nil { + objectMap["properties"] = sbqods.ServiceBusQueueOutputDataSourceProperties + } + if sbqods.Type != "" { + objectMap["type"] = sbqods.Type + } + return json.Marshal(objectMap) +} + +// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. +func (sbqods ServiceBusQueueOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { + return nil, false +} + +// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. +func (sbqods ServiceBusQueueOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. +func (sbqods ServiceBusQueueOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. 
+func (sbqods ServiceBusQueueOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { + return &sbqods, true +} + +// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. +func (sbqods ServiceBusQueueOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { + return nil, false +} + +// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. +func (sbqods ServiceBusQueueOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { + return nil, false +} + +// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. +func (sbqods ServiceBusQueueOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { + return nil, false +} + +// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. +func (sbqods ServiceBusQueueOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { + return nil, false +} + +// AsBlobOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. +func (sbqods ServiceBusQueueOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { + return nil, false +} + +// AsOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. +func (sbqods ServiceBusQueueOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { + return nil, false +} + +// AsBasicOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. +func (sbqods ServiceBusQueueOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { + return &sbqods, true +} + +// UnmarshalJSON is the custom unmarshaler for ServiceBusQueueOutputDataSource struct. 
+func (sbqods *ServiceBusQueueOutputDataSource) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var serviceBusQueueOutputDataSourceProperties ServiceBusQueueOutputDataSourceProperties + err = json.Unmarshal(*v, &serviceBusQueueOutputDataSourceProperties) + if err != nil { + return err + } + sbqods.ServiceBusQueueOutputDataSourceProperties = &serviceBusQueueOutputDataSourceProperties + } + case "type": + if v != nil { + var typeVar TypeBasicOutputDataSource + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + sbqods.Type = typeVar + } + } + } + + return nil +} + +// ServiceBusQueueOutputDataSourceProperties the properties that are associated with a Service Bus Queue +// output. +type ServiceBusQueueOutputDataSourceProperties struct { + // QueueName - The name of the Service Bus Queue. Required on PUT (CreateOrReplace) requests. + QueueName *string `json:"queueName,omitempty"` + // PropertyColumns - A string array of the names of output columns to be attached to Service Bus messages as custom properties. + PropertyColumns *[]string `json:"propertyColumns,omitempty"` + // ServiceBusNamespace - The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"` + // SharedAccessPolicyName - The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` + // SharedAccessPolicyKey - The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. 
+ SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` +} + +// ServiceBusTopicOutputDataSource describes a Service Bus Topic output data source. +type ServiceBusTopicOutputDataSource struct { + // ServiceBusTopicOutputDataSourceProperties - The properties that are associated with a Service Bus Topic output. Required on PUT (CreateOrReplace) requests. + *ServiceBusTopicOutputDataSourceProperties `json:"properties,omitempty"` + // Type - Possible values include: 'TypeOutputDataSource', 'TypeMicrosoftDataLakeAccounts', 'TypePowerBI', 'TypeMicrosoftServiceBusTopic', 'TypeMicrosoftServiceBusQueue', 'TypeMicrosoftStorageDocumentDB', 'TypeMicrosoftSQLServerDatabase', 'TypeMicrosoftServiceBusEventHub', 'TypeMicrosoftStorageTable', 'TypeMicrosoftStorageBlob' + Type TypeBasicOutputDataSource `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for ServiceBusTopicOutputDataSource. +func (sbtods ServiceBusTopicOutputDataSource) MarshalJSON() ([]byte, error) { + sbtods.Type = TypeMicrosoftServiceBusTopic + objectMap := make(map[string]interface{}) + if sbtods.ServiceBusTopicOutputDataSourceProperties != nil { + objectMap["properties"] = sbtods.ServiceBusTopicOutputDataSourceProperties + } + if sbtods.Type != "" { + objectMap["type"] = sbtods.Type + } + return json.Marshal(objectMap) +} + +// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. +func (sbtods ServiceBusTopicOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { + return nil, false +} + +// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. +func (sbtods ServiceBusTopicOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. 
+func (sbtods ServiceBusTopicOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { + return &sbtods, true +} + +// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. +func (sbtods ServiceBusTopicOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { + return nil, false +} + +// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. +func (sbtods ServiceBusTopicOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { + return nil, false +} + +// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. +func (sbtods ServiceBusTopicOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { + return nil, false +} + +// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. +func (sbtods ServiceBusTopicOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { + return nil, false +} + +// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. +func (sbtods ServiceBusTopicOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { + return nil, false +} + +// AsBlobOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. +func (sbtods ServiceBusTopicOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { + return nil, false +} + +// AsOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. 
+func (sbtods ServiceBusTopicOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { + return nil, false +} + +// AsBasicOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. +func (sbtods ServiceBusTopicOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { + return &sbtods, true +} + +// UnmarshalJSON is the custom unmarshaler for ServiceBusTopicOutputDataSource struct. +func (sbtods *ServiceBusTopicOutputDataSource) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var serviceBusTopicOutputDataSourceProperties ServiceBusTopicOutputDataSourceProperties + err = json.Unmarshal(*v, &serviceBusTopicOutputDataSourceProperties) + if err != nil { + return err + } + sbtods.ServiceBusTopicOutputDataSourceProperties = &serviceBusTopicOutputDataSourceProperties + } + case "type": + if v != nil { + var typeVar TypeBasicOutputDataSource + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + sbtods.Type = typeVar + } + } + } + + return nil +} + +// ServiceBusTopicOutputDataSourceProperties the properties that are associated with a Service Bus Topic +// output. +type ServiceBusTopicOutputDataSourceProperties struct { + // TopicName - The name of the Service Bus Topic. Required on PUT (CreateOrReplace) requests. + TopicName *string `json:"topicName,omitempty"` + // PropertyColumns - A string array of the names of output columns to be attached to Service Bus messages as custom properties. + PropertyColumns *[]string `json:"propertyColumns,omitempty"` + // ServiceBusNamespace - The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. 
+ ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"` + // SharedAccessPolicyName - The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. + SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` + // SharedAccessPolicyKey - The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. + SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` +} + +// Sku the properties that are associated with a SKU. +type Sku struct { + // Name - The name of the SKU. Required on PUT (CreateOrReplace) requests. Possible values include: 'Standard' + Name SkuName `json:"name,omitempty"` +} + +// StartStreamingJobParameters parameters supplied to the Start Streaming Job operation. +type StartStreamingJobParameters struct { + // OutputStartMode - Value may be JobStartTime, CustomTime, or LastOutputEventTime to indicate whether the starting point of the output event stream should start whenever the job is started, start at a custom user time stamp specified via the outputStartTime property, or start from the last event output time. Possible values include: 'JobStartTime', 'CustomTime', 'LastOutputEventTime' + OutputStartMode OutputStartMode `json:"outputStartMode,omitempty"` + // OutputStartTime - Value is either an ISO-8601 formatted time stamp that indicates the starting point of the output event stream, or null to indicate that the output event stream will start whenever the streaming job is started. This property must have a value if outputStartMode is set to CustomTime. + OutputStartTime *date.Time `json:"outputStartTime,omitempty"` +} + +// StorageAccount the properties that are associated with an Azure Storage account +type StorageAccount struct { + // AccountName - The name of the Azure Storage account. Required on PUT (CreateOrReplace) requests. 
+ AccountName *string `json:"accountName,omitempty"` + // AccountKey - The account key for the Azure Storage account. Required on PUT (CreateOrReplace) requests. + AccountKey *string `json:"accountKey,omitempty"` +} + +// StreamingJob a streaming job object, containing all information associated with the named streaming job. +type StreamingJob struct { + autorest.Response `json:"-"` + // StreamingJobProperties - The properties that are associated with a streaming job. Required on PUT (CreateOrReplace) requests. + *StreamingJobProperties `json:"properties,omitempty"` + // ID - Resource Id + ID *string `json:"id,omitempty"` + // Name - Resource name + Name *string `json:"name,omitempty"` + // Type - Resource type + Type *string `json:"type,omitempty"` + // Location - Resource location. Required on PUT (CreateOrReplace) requests. + Location *string `json:"location,omitempty"` + // Tags - Resource tags + Tags map[string]*string `json:"tags"` +} + +// MarshalJSON is the custom marshaler for StreamingJob. +func (sj StreamingJob) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if sj.StreamingJobProperties != nil { + objectMap["properties"] = sj.StreamingJobProperties + } + if sj.ID != nil { + objectMap["id"] = sj.ID + } + if sj.Name != nil { + objectMap["name"] = sj.Name + } + if sj.Type != nil { + objectMap["type"] = sj.Type + } + if sj.Location != nil { + objectMap["location"] = sj.Location + } + if sj.Tags != nil { + objectMap["tags"] = sj.Tags + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for StreamingJob struct. 
+func (sj *StreamingJob) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var streamingJobProperties StreamingJobProperties + err = json.Unmarshal(*v, &streamingJobProperties) + if err != nil { + return err + } + sj.StreamingJobProperties = &streamingJobProperties + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + sj.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + sj.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + sj.Type = &typeVar + } + case "location": + if v != nil { + var location string + err = json.Unmarshal(*v, &location) + if err != nil { + return err + } + sj.Location = &location + } + case "tags": + if v != nil { + var tags map[string]*string + err = json.Unmarshal(*v, &tags) + if err != nil { + return err + } + sj.Tags = tags + } + } + } + + return nil +} + +// StreamingJobListResult object containing a list of streaming jobs. +type StreamingJobListResult struct { + autorest.Response `json:"-"` + // Value - A list of streaming jobs. Populated by a 'List' operation. + Value *[]StreamingJob `json:"value,omitempty"` + // NextLink - The link (url) to the next page of results. + NextLink *string `json:"nextLink,omitempty"` +} + +// StreamingJobListResultIterator provides access to a complete listing of StreamingJob values. +type StreamingJobListResultIterator struct { + i int + page StreamingJobListResultPage +} + +// NextWithContext advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. 
+func (iter *StreamingJobListResultIterator) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobListResultIterator.NextWithContext") + defer func() { + sc := -1 + if iter.Response().Response.Response != nil { + sc = iter.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err = iter.page.NextWithContext(ctx) + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (iter *StreamingJobListResultIterator) Next() error { + return iter.NextWithContext(context.Background()) +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter StreamingJobListResultIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter StreamingJobListResultIterator) Response() StreamingJobListResult { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter StreamingJobListResultIterator) Value() StreamingJob { + if !iter.page.NotDone() { + return StreamingJob{} + } + return iter.page.Values()[iter.i] +} + +// Creates a new instance of the StreamingJobListResultIterator type. +func NewStreamingJobListResultIterator(page StreamingJobListResultPage) StreamingJobListResultIterator { + return StreamingJobListResultIterator{page: page} +} + +// IsEmpty returns true if the ListResult contains no values. 
+func (sjlr StreamingJobListResult) IsEmpty() bool { + return sjlr.Value == nil || len(*sjlr.Value) == 0 +} + +// streamingJobListResultPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (sjlr StreamingJobListResult) streamingJobListResultPreparer(ctx context.Context) (*http.Request, error) { + if sjlr.NextLink == nil || len(to.String(sjlr.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare((&http.Request{}).WithContext(ctx), + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(sjlr.NextLink))) +} + +// StreamingJobListResultPage contains a page of StreamingJob values. +type StreamingJobListResultPage struct { + fn func(context.Context, StreamingJobListResult) (StreamingJobListResult, error) + sjlr StreamingJobListResult +} + +// NextWithContext advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *StreamingJobListResultPage) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobListResultPage.NextWithContext") + defer func() { + sc := -1 + if page.Response().Response.Response != nil { + sc = page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + next, err := page.fn(ctx, page.sjlr) + if err != nil { + return err + } + page.sjlr = next + return nil +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (page *StreamingJobListResultPage) Next() error { + return page.NextWithContext(context.Background()) +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. 
+func (page StreamingJobListResultPage) NotDone() bool { + return !page.sjlr.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page StreamingJobListResultPage) Response() StreamingJobListResult { + return page.sjlr +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page StreamingJobListResultPage) Values() []StreamingJob { + if page.sjlr.IsEmpty() { + return nil + } + return *page.sjlr.Value +} + +// Creates a new instance of the StreamingJobListResultPage type. +func NewStreamingJobListResultPage(getNextPage func(context.Context, StreamingJobListResult) (StreamingJobListResult, error)) StreamingJobListResultPage { + return StreamingJobListResultPage{fn: getNextPage} +} + +// StreamingJobProperties the properties that are associated with a streaming job. +type StreamingJobProperties struct { + // Sku - Describes the SKU of the streaming job. Required on PUT (CreateOrReplace) requests. + Sku *Sku `json:"sku,omitempty"` + // JobID - A GUID uniquely identifying the streaming job. This GUID is generated upon creation of the streaming job. + JobID *string `json:"jobId,omitempty"` + // ProvisioningState - Describes the provisioning status of the streaming job. + ProvisioningState *string `json:"provisioningState,omitempty"` + // JobState - Describes the state of the streaming job. + JobState *string `json:"jobState,omitempty"` + // OutputStartMode - This property should only be utilized when it is desired that the job be started immediately upon creation. Value may be JobStartTime, CustomTime, or LastOutputEventTime to indicate whether the starting point of the output event stream should start whenever the job is started, start at a custom user time stamp specified via the outputStartTime property, or start from the last event output time. 
Possible values include: 'JobStartTime', 'CustomTime', 'LastOutputEventTime' + OutputStartMode OutputStartMode `json:"outputStartMode,omitempty"` + // OutputStartTime - Value is either an ISO-8601 formatted time stamp that indicates the starting point of the output event stream, or null to indicate that the output event stream will start whenever the streaming job is started. This property must have a value if outputStartMode is set to CustomTime. + OutputStartTime *date.Time `json:"outputStartTime,omitempty"` + // LastOutputEventTime - Value is either an ISO-8601 formatted timestamp indicating the last output event time of the streaming job or null indicating that output has not yet been produced. In case of multiple outputs or multiple streams, this shows the latest value in that set. + LastOutputEventTime *date.Time `json:"lastOutputEventTime,omitempty"` + // EventsOutOfOrderPolicy - Indicates the policy to apply to events that arrive out of order in the input event stream. Possible values include: 'Adjust', 'Drop' + EventsOutOfOrderPolicy EventsOutOfOrderPolicy `json:"eventsOutOfOrderPolicy,omitempty"` + // OutputErrorPolicy - Indicates the policy to apply to events that arrive at the output and cannot be written to the external storage due to being malformed (missing column values, column values of wrong type or size). Possible values include: 'OutputErrorPolicyStop', 'OutputErrorPolicyDrop' + OutputErrorPolicy OutputErrorPolicy `json:"outputErrorPolicy,omitempty"` + // EventsOutOfOrderMaxDelayInSeconds - The maximum tolerable delay in seconds where out-of-order events can be adjusted to be back in order. + EventsOutOfOrderMaxDelayInSeconds *int32 `json:"eventsOutOfOrderMaxDelayInSeconds,omitempty"` + // EventsLateArrivalMaxDelayInSeconds - The maximum tolerable delay in seconds where events arriving late could be included. Supported range is -1 to 1814399 (20.23:59:59 days) and -1 is used to specify wait indefinitely. 
If the property is absent, it is interpreted to have a value of -1. + EventsLateArrivalMaxDelayInSeconds *int32 `json:"eventsLateArrivalMaxDelayInSeconds,omitempty"` + // DataLocale - The data locale of the stream analytics job. Value should be the name of a supported .NET Culture from the set https://msdn.microsoft.com/en-us/library/system.globalization.culturetypes(v=vs.110).aspx. Defaults to 'en-US' if none specified. + DataLocale *string `json:"dataLocale,omitempty"` + // CompatibilityLevel - Controls certain runtime behaviors of the streaming job. Possible values include: 'OneFullStopZero' + CompatibilityLevel CompatibilityLevel `json:"compatibilityLevel,omitempty"` + // CreatedDate - Value is an ISO-8601 formatted UTC timestamp indicating when the streaming job was created. + CreatedDate *date.Time `json:"createdDate,omitempty"` + // Inputs - A list of one or more inputs to the streaming job. The name property for each input is required when specifying this property in a PUT request. This property cannot be modify via a PATCH operation. You must use the PATCH API available for the individual input. + Inputs *[]Input `json:"inputs,omitempty"` + // Transformation - Indicates the query and the number of streaming units to use for the streaming job. The name property of the transformation is required when specifying this property in a PUT request. This property cannot be modify via a PATCH operation. You must use the PATCH API available for the individual transformation. + Transformation *Transformation `json:"transformation,omitempty"` + // Outputs - A list of one or more outputs for the streaming job. The name property for each output is required when specifying this property in a PUT request. This property cannot be modify via a PATCH operation. You must use the PATCH API available for the individual output. + Outputs *[]Output `json:"outputs,omitempty"` + // Functions - A list of one or more functions for the streaming job. 
The name property for each function is required when specifying this property in a PUT request. This property cannot be modify via a PATCH operation. You must use the PATCH API available for the individual transformation. + Functions *[]Function `json:"functions,omitempty"` + // Etag - The current entity tag for the streaming job. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. + Etag *string `json:"etag,omitempty"` +} + +// StreamingJobsCreateOrReplaceFuture an abstraction for monitoring and retrieving the results of a +// long-running operation. +type StreamingJobsCreateOrReplaceFuture struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. +func (future *StreamingJobsCreateOrReplaceFuture) Result(client StreamingJobsClient) (sj StreamingJob, err error) { + var done bool + done, err = future.Done(client) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsCreateOrReplaceFuture", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("streamanalytics.StreamingJobsCreateOrReplaceFuture") + return + } + sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) + if sj.Response.Response, err = future.GetResult(sender); err == nil && sj.Response.Response.StatusCode != http.StatusNoContent { + sj, err = client.CreateOrReplaceResponder(sj.Response.Response) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsCreateOrReplaceFuture", "Result", sj.Response.Response, "Failure responding to request") + } + } + return +} + +// StreamingJobsDeleteFuture an abstraction for monitoring and retrieving 
the results of a long-running +// operation. +type StreamingJobsDeleteFuture struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. +func (future *StreamingJobsDeleteFuture) Result(client StreamingJobsClient) (ar autorest.Response, err error) { + var done bool + done, err = future.Done(client) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsDeleteFuture", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("streamanalytics.StreamingJobsDeleteFuture") + return + } + ar.Response = future.Response() + return +} + +// StreamingJobsStartFuture an abstraction for monitoring and retrieving the results of a long-running +// operation. +type StreamingJobsStartFuture struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. +func (future *StreamingJobsStartFuture) Result(client StreamingJobsClient) (ar autorest.Response, err error) { + var done bool + done, err = future.Done(client) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsStartFuture", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("streamanalytics.StreamingJobsStartFuture") + return + } + ar.Response = future.Response() + return +} + +// StreamingJobsStopFuture an abstraction for monitoring and retrieving the results of a long-running +// operation. +type StreamingJobsStopFuture struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. 
+func (future *StreamingJobsStopFuture) Result(client StreamingJobsClient) (ar autorest.Response, err error) { + var done bool + done, err = future.Done(client) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsStopFuture", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("streamanalytics.StreamingJobsStopFuture") + return + } + ar.Response = future.Response() + return +} + +// BasicStreamInputDataSource describes an input data source that contains stream data. +type BasicStreamInputDataSource interface { + AsIoTHubStreamInputDataSource() (*IoTHubStreamInputDataSource, bool) + AsEventHubStreamInputDataSource() (*EventHubStreamInputDataSource, bool) + AsBlobStreamInputDataSource() (*BlobStreamInputDataSource, bool) + AsStreamInputDataSource() (*StreamInputDataSource, bool) +} + +// StreamInputDataSource describes an input data source that contains stream data. +type StreamInputDataSource struct { + // Type - Possible values include: 'TypeBasicStreamInputDataSourceTypeStreamInputDataSource', 'TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs', 'TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob' + Type TypeBasicStreamInputDataSource `json:"type,omitempty"` +} + +func unmarshalBasicStreamInputDataSource(body []byte) (BasicStreamInputDataSource, error) { + var m map[string]interface{} + err := json.Unmarshal(body, &m) + if err != nil { + return nil, err + } + + switch m["type"] { + case string(TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs): + var ithsids IoTHubStreamInputDataSource + err := json.Unmarshal(body, &ithsids) + return ithsids, err + case string(TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub): + var ehsids EventHubStreamInputDataSource + err := json.Unmarshal(body, &ehsids) + return ehsids, err + case 
string(TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob): + var bsids BlobStreamInputDataSource + err := json.Unmarshal(body, &bsids) + return bsids, err + default: + var sids StreamInputDataSource + err := json.Unmarshal(body, &sids) + return sids, err + } +} +func unmarshalBasicStreamInputDataSourceArray(body []byte) ([]BasicStreamInputDataSource, error) { + var rawMessages []*json.RawMessage + err := json.Unmarshal(body, &rawMessages) + if err != nil { + return nil, err + } + + sidsArray := make([]BasicStreamInputDataSource, len(rawMessages)) + + for index, rawMessage := range rawMessages { + sids, err := unmarshalBasicStreamInputDataSource(*rawMessage) + if err != nil { + return nil, err + } + sidsArray[index] = sids + } + return sidsArray, nil +} + +// MarshalJSON is the custom marshaler for StreamInputDataSource. +func (sids StreamInputDataSource) MarshalJSON() ([]byte, error) { + sids.Type = TypeBasicStreamInputDataSourceTypeStreamInputDataSource + objectMap := make(map[string]interface{}) + if sids.Type != "" { + objectMap["type"] = sids.Type + } + return json.Marshal(objectMap) +} + +// AsIoTHubStreamInputDataSource is the BasicStreamInputDataSource implementation for StreamInputDataSource. +func (sids StreamInputDataSource) AsIoTHubStreamInputDataSource() (*IoTHubStreamInputDataSource, bool) { + return nil, false +} + +// AsEventHubStreamInputDataSource is the BasicStreamInputDataSource implementation for StreamInputDataSource. +func (sids StreamInputDataSource) AsEventHubStreamInputDataSource() (*EventHubStreamInputDataSource, bool) { + return nil, false +} + +// AsBlobStreamInputDataSource is the BasicStreamInputDataSource implementation for StreamInputDataSource. +func (sids StreamInputDataSource) AsBlobStreamInputDataSource() (*BlobStreamInputDataSource, bool) { + return nil, false +} + +// AsStreamInputDataSource is the BasicStreamInputDataSource implementation for StreamInputDataSource. 
+func (sids StreamInputDataSource) AsStreamInputDataSource() (*StreamInputDataSource, bool) { + return &sids, true +} + +// AsBasicStreamInputDataSource is the BasicStreamInputDataSource implementation for StreamInputDataSource. +func (sids StreamInputDataSource) AsBasicStreamInputDataSource() (BasicStreamInputDataSource, bool) { + return &sids, true +} + +// StreamInputProperties the properties that are associated with an input containing stream data. +type StreamInputProperties struct { + // Datasource - Describes an input data source that contains stream data. Required on PUT (CreateOrReplace) requests. + Datasource BasicStreamInputDataSource `json:"datasource,omitempty"` + // Serialization - Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests. + Serialization BasicSerialization `json:"serialization,omitempty"` + // Diagnostics - Describes conditions applicable to the Input, Output, or the job overall, that warrant customer attention. + Diagnostics *Diagnostics `json:"diagnostics,omitempty"` + // Etag - The current entity tag for the input. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. + Etag *string `json:"etag,omitempty"` + // Type - Possible values include: 'TypeInputProperties', 'TypeReference', 'TypeStream' + Type TypeBasicInputProperties `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for StreamInputProperties. 
+func (sip StreamInputProperties) MarshalJSON() ([]byte, error) { + sip.Type = TypeStream + objectMap := make(map[string]interface{}) + objectMap["datasource"] = sip.Datasource + objectMap["serialization"] = sip.Serialization + if sip.Diagnostics != nil { + objectMap["diagnostics"] = sip.Diagnostics + } + if sip.Etag != nil { + objectMap["etag"] = sip.Etag + } + if sip.Type != "" { + objectMap["type"] = sip.Type + } + return json.Marshal(objectMap) +} + +// AsReferenceInputProperties is the BasicInputProperties implementation for StreamInputProperties. +func (sip StreamInputProperties) AsReferenceInputProperties() (*ReferenceInputProperties, bool) { + return nil, false +} + +// AsStreamInputProperties is the BasicInputProperties implementation for StreamInputProperties. +func (sip StreamInputProperties) AsStreamInputProperties() (*StreamInputProperties, bool) { + return &sip, true +} + +// AsInputProperties is the BasicInputProperties implementation for StreamInputProperties. +func (sip StreamInputProperties) AsInputProperties() (*InputProperties, bool) { + return nil, false +} + +// AsBasicInputProperties is the BasicInputProperties implementation for StreamInputProperties. +func (sip StreamInputProperties) AsBasicInputProperties() (BasicInputProperties, bool) { + return &sip, true +} + +// UnmarshalJSON is the custom unmarshaler for StreamInputProperties struct. 
+func (sip *StreamInputProperties) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "datasource": + if v != nil { + datasource, err := unmarshalBasicStreamInputDataSource(*v) + if err != nil { + return err + } + sip.Datasource = datasource + } + case "serialization": + if v != nil { + serialization, err := unmarshalBasicSerialization(*v) + if err != nil { + return err + } + sip.Serialization = serialization + } + case "diagnostics": + if v != nil { + var diagnostics Diagnostics + err = json.Unmarshal(*v, &diagnostics) + if err != nil { + return err + } + sip.Diagnostics = &diagnostics + } + case "etag": + if v != nil { + var etag string + err = json.Unmarshal(*v, &etag) + if err != nil { + return err + } + sip.Etag = &etag + } + case "type": + if v != nil { + var typeVar TypeBasicInputProperties + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + sip.Type = typeVar + } + } + } + + return nil +} + +// SubResource the base sub-resource model definition. +type SubResource struct { + // ID - Resource Id + ID *string `json:"id,omitempty"` + // Name - Resource name + Name *string `json:"name,omitempty"` + // Type - Resource type + Type *string `json:"type,omitempty"` +} + +// SubscriptionQuota describes the current quota for the subscription. +type SubscriptionQuota struct { + // SubscriptionQuotaProperties - Describes the properties of the quota. + *SubscriptionQuotaProperties `json:"properties,omitempty"` + // ID - Resource Id + ID *string `json:"id,omitempty"` + // Name - Resource name + Name *string `json:"name,omitempty"` + // Type - Resource type + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for SubscriptionQuota. 
+func (sq SubscriptionQuota) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if sq.SubscriptionQuotaProperties != nil { + objectMap["properties"] = sq.SubscriptionQuotaProperties + } + if sq.ID != nil { + objectMap["id"] = sq.ID + } + if sq.Name != nil { + objectMap["name"] = sq.Name + } + if sq.Type != nil { + objectMap["type"] = sq.Type + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for SubscriptionQuota struct. +func (sq *SubscriptionQuota) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var subscriptionQuotaProperties SubscriptionQuotaProperties + err = json.Unmarshal(*v, &subscriptionQuotaProperties) + if err != nil { + return err + } + sq.SubscriptionQuotaProperties = &subscriptionQuotaProperties + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + sq.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + sq.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + sq.Type = &typeVar + } + } + } + + return nil +} + +// SubscriptionQuotaProperties describes the properties of the quota. +type SubscriptionQuotaProperties struct { + // MaxCount - The max permitted usage of this resource. + MaxCount *int32 `json:"maxCount,omitempty"` + // CurrentCount - The current usage of this resource. + CurrentCount *int32 `json:"currentCount,omitempty"` +} + +// SubscriptionQuotasListResult result of the GetQuotas operation. It contains a list of quotas for the +// subscription in a particular region. 
+type SubscriptionQuotasListResult struct { + autorest.Response `json:"-"` + // Value - List of quotas for the subscription in a particular region. + Value *[]SubscriptionQuota `json:"value,omitempty"` +} + +// Transformation a transformation object, containing all information associated with the named +// transformation. All transformations are contained under a streaming job. +type Transformation struct { + autorest.Response `json:"-"` + // TransformationProperties - The properties that are associated with a transformation. Required on PUT (CreateOrReplace) requests. + *TransformationProperties `json:"properties,omitempty"` + // ID - Resource Id + ID *string `json:"id,omitempty"` + // Name - Resource name + Name *string `json:"name,omitempty"` + // Type - Resource type + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for Transformation. +func (t Transformation) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if t.TransformationProperties != nil { + objectMap["properties"] = t.TransformationProperties + } + if t.ID != nil { + objectMap["id"] = t.ID + } + if t.Name != nil { + objectMap["name"] = t.Name + } + if t.Type != nil { + objectMap["type"] = t.Type + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for Transformation struct. 
+func (t *Transformation) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var transformationProperties TransformationProperties + err = json.Unmarshal(*v, &transformationProperties) + if err != nil { + return err + } + t.TransformationProperties = &transformationProperties + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + t.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + t.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + t.Type = &typeVar + } + } + } + + return nil +} + +// TransformationProperties the properties that are associated with a transformation. +type TransformationProperties struct { + // StreamingUnits - Specifies the number of streaming units that the streaming job uses. + StreamingUnits *int32 `json:"streamingUnits,omitempty"` + // Query - Specifies the query that will be run in the streaming job. You can learn more about the Stream Analytics Query Language (SAQL) here: https://msdn.microsoft.com/library/azure/dn834998 . Required on PUT (CreateOrReplace) requests. + Query *string `json:"query,omitempty"` + // Etag - The current entity tag for the transformation. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. 
+ Etag *string `json:"etag,omitempty"` +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/operations.go b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/operations.go new file mode 100644 index 000000000000..c42a3dcb7615 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/operations.go @@ -0,0 +1,147 @@ +package streamanalytics + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/tracing" + "net/http" +) + +// OperationsClient is the stream Analytics Client +type OperationsClient struct { + BaseClient +} + +// NewOperationsClient creates an instance of the OperationsClient client. +func NewOperationsClient(subscriptionID string) OperationsClient { + return NewOperationsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewOperationsClientWithBaseURI creates an instance of the OperationsClient client. 
+func NewOperationsClientWithBaseURI(baseURI string, subscriptionID string) OperationsClient { + return OperationsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// List lists all of the available Stream Analytics related operations. +func (client OperationsClient) List(ctx context.Context) (result OperationListResultPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OperationsClient.List") + defer func() { + sc := -1 + if result.olr.Response.Response != nil { + sc = result.olr.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.fn = client.listNextResults + req, err := client.ListPreparer(ctx) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OperationsClient", "List", nil, "Failure preparing request") + return + } + + resp, err := client.ListSender(req) + if err != nil { + result.olr.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.OperationsClient", "List", resp, "Failure sending request") + return + } + + result.olr, err = client.ListResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OperationsClient", "List", resp, "Failure responding to request") + } + + return +} + +// ListPreparer prepares the List request. +func (client OperationsClient) ListPreparer(ctx context.Context) (*http.Request, error) { + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPath("/providers/Microsoft.StreamAnalytics/operations"), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListSender sends the List request. The method will close the +// http.Response Body if it receives an error. 
+func (client OperationsClient) ListSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) +} + +// ListResponder handles the response to the List request. The method always +// closes the http.Response Body. +func (client OperationsClient) ListResponder(resp *http.Response) (result OperationListResult, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listNextResults retrieves the next set of results, if any. +func (client OperationsClient) listNextResults(ctx context.Context, lastResults OperationListResult) (result OperationListResult, err error) { + req, err := lastResults.operationListResultPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "streamanalytics.OperationsClient", "listNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "streamanalytics.OperationsClient", "listNextResults", resp, "Failure sending next results request") + } + result, err = client.ListResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OperationsClient", "listNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListComplete enumerates all values, automatically crossing page boundaries as required. 
+func (client OperationsClient) ListComplete(ctx context.Context) (result OperationListResultIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OperationsClient.List") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.List(ctx) + return +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/outputs.go b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/outputs.go new file mode 100644 index 000000000000..8e22a09c6d65 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/outputs.go @@ -0,0 +1,600 @@ +package streamanalytics + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/tracing" + "net/http" +) + +// OutputsClient is the stream Analytics Client +type OutputsClient struct { + BaseClient +} + +// NewOutputsClient creates an instance of the OutputsClient client. +func NewOutputsClient(subscriptionID string) OutputsClient { + return NewOutputsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewOutputsClientWithBaseURI creates an instance of the OutputsClient client. +func NewOutputsClientWithBaseURI(baseURI string, subscriptionID string) OutputsClient { + return OutputsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// CreateOrReplace creates an output or replaces an already existing output under an existing streaming job. +// Parameters: +// output - the definition of the output that will be used to create a new output or replace the existing one +// under the streaming job. +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. +// jobName - the name of the streaming job. +// outputName - the name of the output. +// ifMatch - the ETag of the output. Omit this value to always overwrite the current output. Specify the +// last-seen ETag value to prevent accidentally overwriting concurrent changes. +// ifNoneMatch - set to '*' to allow a new output to be created, but to prevent updating an existing output. +// Other values will result in a 412 Pre-condition Failed response. 
+func (client OutputsClient) CreateOrReplace(ctx context.Context, output Output, resourceGroupName string, jobName string, outputName string, ifMatch string, ifNoneMatch string) (result Output, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OutputsClient.CreateOrReplace") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.CreateOrReplacePreparer(ctx, output, resourceGroupName, jobName, outputName, ifMatch, ifNoneMatch) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "CreateOrReplace", nil, "Failure preparing request") + return + } + + resp, err := client.CreateOrReplaceSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "CreateOrReplace", resp, "Failure sending request") + return + } + + result, err = client.CreateOrReplaceResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "CreateOrReplace", resp, "Failure responding to request") + } + + return +} + +// CreateOrReplacePreparer prepares the CreateOrReplace request. 
+func (client OutputsClient) CreateOrReplacePreparer(ctx context.Context, output Output, resourceGroupName string, jobName string, outputName string, ifMatch string, ifNoneMatch string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "outputName": autorest.Encode("path", outputName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}", pathParameters), + autorest.WithJSON(output), + autorest.WithQueryParameters(queryParameters)) + if len(ifMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-Match", autorest.String(ifMatch))) + } + if len(ifNoneMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-None-Match", autorest.String(ifNoneMatch))) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// CreateOrReplaceSender sends the CreateOrReplace request. The method will close the +// http.Response Body if it receives an error. +func (client OutputsClient) CreateOrReplaceSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// CreateOrReplaceResponder handles the response to the CreateOrReplace request. The method always +// closes the http.Response Body. 
+func (client OutputsClient) CreateOrReplaceResponder(resp *http.Response) (result Output, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Delete deletes an output from the streaming job. +// Parameters: +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. +// jobName - the name of the streaming job. +// outputName - the name of the output. +func (client OutputsClient) Delete(ctx context.Context, resourceGroupName string, jobName string, outputName string) (result autorest.Response, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OutputsClient.Delete") + defer func() { + sc := -1 + if result.Response != nil { + sc = result.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.DeletePreparer(ctx, resourceGroupName, jobName, outputName) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Delete", nil, "Failure preparing request") + return + } + + resp, err := client.DeleteSender(req) + if err != nil { + result.Response = resp + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Delete", resp, "Failure sending request") + return + } + + result, err = client.DeleteResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Delete", resp, "Failure responding to request") + } + + return +} + +// DeletePreparer prepares the Delete request. 
+func (client OutputsClient) DeletePreparer(ctx context.Context, resourceGroupName string, jobName string, outputName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "outputName": autorest.Encode("path", outputName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsDelete(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// DeleteSender sends the Delete request. The method will close the +// http.Response Body if it receives an error. +func (client OutputsClient) DeleteSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// DeleteResponder handles the response to the Delete request. The method always +// closes the http.Response Body. +func (client OutputsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent), + autorest.ByClosing()) + result.Response = resp + return +} + +// Get gets details about the specified output. +// Parameters: +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. +// jobName - the name of the streaming job. 
+// outputName - the name of the output. +func (client OutputsClient) Get(ctx context.Context, resourceGroupName string, jobName string, outputName string) (result Output, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OutputsClient.Get") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.GetPreparer(ctx, resourceGroupName, jobName, outputName) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Get", nil, "Failure preparing request") + return + } + + resp, err := client.GetSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Get", resp, "Failure sending request") + return + } + + result, err = client.GetResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Get", resp, "Failure responding to request") + } + + return +} + +// GetPreparer prepares the Get request. 
+func (client OutputsClient) GetPreparer(ctx context.Context, resourceGroupName string, jobName string, outputName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "outputName": autorest.Encode("path", outputName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetSender sends the Get request. The method will close the +// http.Response Body if it receives an error. +func (client OutputsClient) GetSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// GetResponder handles the response to the Get request. The method always +// closes the http.Response Body. +func (client OutputsClient) GetResponder(resp *http.Response) (result Output, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// ListByStreamingJob lists all of the outputs under the specified streaming job. +// Parameters: +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. 
+// jobName - the name of the streaming job. +// selectParameter - the $select OData query parameter. This is a comma-separated list of structural properties +// to include in the response, or "*" to include all properties. By default, all properties are returned except +// diagnostics. Currently only accepts '*' as a valid value. +func (client OutputsClient) ListByStreamingJob(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result OutputListResultPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OutputsClient.ListByStreamingJob") + defer func() { + sc := -1 + if result.olr.Response.Response != nil { + sc = result.olr.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.fn = client.listByStreamingJobNextResults + req, err := client.ListByStreamingJobPreparer(ctx, resourceGroupName, jobName, selectParameter) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "ListByStreamingJob", nil, "Failure preparing request") + return + } + + resp, err := client.ListByStreamingJobSender(req) + if err != nil { + result.olr.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "ListByStreamingJob", resp, "Failure sending request") + return + } + + result.olr, err = client.ListByStreamingJobResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "ListByStreamingJob", resp, "Failure responding to request") + } + + return +} + +// ListByStreamingJobPreparer prepares the ListByStreamingJob request. 
+func (client OutputsClient) ListByStreamingJobPreparer(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(selectParameter) > 0 { + queryParameters["$select"] = autorest.Encode("query", selectParameter) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListByStreamingJobSender sends the ListByStreamingJob request. The method will close the +// http.Response Body if it receives an error. +func (client OutputsClient) ListByStreamingJobSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// ListByStreamingJobResponder handles the response to the ListByStreamingJob request. The method always +// closes the http.Response Body. +func (client OutputsClient) ListByStreamingJobResponder(resp *http.Response) (result OutputListResult, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listByStreamingJobNextResults retrieves the next set of results, if any. 
+func (client OutputsClient) listByStreamingJobNextResults(ctx context.Context, lastResults OutputListResult) (result OutputListResult, err error) { + req, err := lastResults.outputListResultPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "listByStreamingJobNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListByStreamingJobSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "listByStreamingJobNextResults", resp, "Failure sending next results request") + } + result, err = client.ListByStreamingJobResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "listByStreamingJobNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListByStreamingJobComplete enumerates all values, automatically crossing page boundaries as required. +func (client OutputsClient) ListByStreamingJobComplete(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result OutputListResultIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OutputsClient.ListByStreamingJob") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListByStreamingJob(ctx, resourceGroupName, jobName, selectParameter) + return +} + +// Test tests whether an output’s datasource is reachable and usable by the Azure Stream Analytics service. +// Parameters: +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. +// jobName - the name of the streaming job. 
+// outputName - the name of the output. +// output - if the output specified does not already exist, this parameter must contain the full output +// definition intended to be tested. If the output specified already exists, this parameter can be left null to +// test the existing output as is or if specified, the properties specified will overwrite the corresponding +// properties in the existing output (exactly like a PATCH operation) and the resulting output will be tested. +func (client OutputsClient) Test(ctx context.Context, resourceGroupName string, jobName string, outputName string, output *Output) (result OutputsTestFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OutputsClient.Test") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.TestPreparer(ctx, resourceGroupName, jobName, outputName, output) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Test", nil, "Failure preparing request") + return + } + + result, err = client.TestSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Test", result.Response(), "Failure sending request") + return + } + + return +} + +// TestPreparer prepares the Test request. 
+func (client OutputsClient) TestPreparer(ctx context.Context, resourceGroupName string, jobName string, outputName string, output *Output) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "outputName": autorest.Encode("path", outputName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test", pathParameters), + autorest.WithQueryParameters(queryParameters)) + if output != nil { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithJSON(output)) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// TestSender sends the Test request. The method will close the +// http.Response Body if it receives an error. +func (client OutputsClient) TestSender(req *http.Request) (future OutputsTestFuture, err error) { + var resp *http.Response + resp, err = autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// TestResponder handles the response to the Test request. The method always +// closes the http.Response Body. 
+func (client OutputsClient) TestResponder(resp *http.Response) (result ResourceTestStatus, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Update updates an existing output under an existing streaming job. This can be used to partially update (ie. update +// one or two properties) an output without affecting the rest the job or output definition. +// Parameters: +// output - an Output object. The properties specified here will overwrite the corresponding properties in the +// existing output (ie. Those properties will be updated). Any properties that are set to null here will mean +// that the corresponding property in the existing output will remain the same and not change as a result of +// this PATCH operation. +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. +// jobName - the name of the streaming job. +// outputName - the name of the output. +// ifMatch - the ETag of the output. Omit this value to always overwrite the current output. Specify the +// last-seen ETag value to prevent accidentally overwriting concurrent changes. 
+func (client OutputsClient) Update(ctx context.Context, output Output, resourceGroupName string, jobName string, outputName string, ifMatch string) (result Output, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OutputsClient.Update") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.UpdatePreparer(ctx, output, resourceGroupName, jobName, outputName, ifMatch) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Update", nil, "Failure preparing request") + return + } + + resp, err := client.UpdateSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Update", resp, "Failure sending request") + return + } + + result, err = client.UpdateResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Update", resp, "Failure responding to request") + } + + return +} + +// UpdatePreparer prepares the Update request. 
+func (client OutputsClient) UpdatePreparer(ctx context.Context, output Output, resourceGroupName string, jobName string, outputName string, ifMatch string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "outputName": autorest.Encode("path", outputName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPatch(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}", pathParameters), + autorest.WithJSON(output), + autorest.WithQueryParameters(queryParameters)) + if len(ifMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-Match", autorest.String(ifMatch))) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// UpdateSender sends the Update request. The method will close the +// http.Response Body if it receives an error. +func (client OutputsClient) UpdateSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// UpdateResponder handles the response to the Update request. The method always +// closes the http.Response Body. 
+func (client OutputsClient) UpdateResponder(resp *http.Response) (result Output, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/streamingjobs.go b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/streamingjobs.go new file mode 100644 index 000000000000..3dc53098b9d8 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/streamingjobs.go @@ -0,0 +1,788 @@ +package streamanalytics + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/tracing" + "net/http" +) + +// StreamingJobsClient is the stream Analytics Client +type StreamingJobsClient struct { + BaseClient +} + +// NewStreamingJobsClient creates an instance of the StreamingJobsClient client. 
+func NewStreamingJobsClient(subscriptionID string) StreamingJobsClient { + return NewStreamingJobsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewStreamingJobsClientWithBaseURI creates an instance of the StreamingJobsClient client. +func NewStreamingJobsClientWithBaseURI(baseURI string, subscriptionID string) StreamingJobsClient { + return StreamingJobsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// CreateOrReplace creates a streaming job or replaces an already existing streaming job. +// Parameters: +// streamingJob - the definition of the streaming job that will be used to create a new streaming job or +// replace the existing one. +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. +// jobName - the name of the streaming job. +// ifMatch - the ETag of the streaming job. Omit this value to always overwrite the current record set. Specify +// the last-seen ETag value to prevent accidentally overwriting concurrent changes. +// ifNoneMatch - set to '*' to allow a new streaming job to be created, but to prevent updating an existing +// record set. Other values will result in a 412 Pre-condition Failed response. 
+func (client StreamingJobsClient) CreateOrReplace(ctx context.Context, streamingJob StreamingJob, resourceGroupName string, jobName string, ifMatch string, ifNoneMatch string) (result StreamingJobsCreateOrReplaceFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.CreateOrReplace") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.CreateOrReplacePreparer(ctx, streamingJob, resourceGroupName, jobName, ifMatch, ifNoneMatch) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "CreateOrReplace", nil, "Failure preparing request") + return + } + + result, err = client.CreateOrReplaceSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "CreateOrReplace", result.Response(), "Failure sending request") + return + } + + return +} + +// CreateOrReplacePreparer prepares the CreateOrReplace request. 
+func (client StreamingJobsClient) CreateOrReplacePreparer(ctx context.Context, streamingJob StreamingJob, resourceGroupName string, jobName string, ifMatch string, ifNoneMatch string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}", pathParameters), + autorest.WithJSON(streamingJob), + autorest.WithQueryParameters(queryParameters)) + if len(ifMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-Match", autorest.String(ifMatch))) + } + if len(ifNoneMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-None-Match", autorest.String(ifNoneMatch))) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// CreateOrReplaceSender sends the CreateOrReplace request. The method will close the +// http.Response Body if it receives an error. +func (client StreamingJobsClient) CreateOrReplaceSender(req *http.Request) (future StreamingJobsCreateOrReplaceFuture, err error) { + var resp *http.Response + resp, err = autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// CreateOrReplaceResponder handles the response to the CreateOrReplace request. The method always +// closes the http.Response Body. 
+func (client StreamingJobsClient) CreateOrReplaceResponder(resp *http.Response) (result StreamingJob, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Delete deletes a streaming job. +// Parameters: +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. +// jobName - the name of the streaming job. +func (client StreamingJobsClient) Delete(ctx context.Context, resourceGroupName string, jobName string) (result StreamingJobsDeleteFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.Delete") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.DeletePreparer(ctx, resourceGroupName, jobName) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Delete", nil, "Failure preparing request") + return + } + + result, err = client.DeleteSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Delete", result.Response(), "Failure sending request") + return + } + + return +} + +// DeletePreparer prepares the Delete request. 
+func (client StreamingJobsClient) DeletePreparer(ctx context.Context, resourceGroupName string, jobName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsDelete(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// DeleteSender sends the Delete request. The method will close the +// http.Response Body if it receives an error. +func (client StreamingJobsClient) DeleteSender(req *http.Request) (future StreamingJobsDeleteFuture, err error) { + var resp *http.Response + resp, err = autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// DeleteResponder handles the response to the Delete request. The method always +// closes the http.Response Body. +func (client StreamingJobsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent), + autorest.ByClosing()) + result.Response = resp + return +} + +// Get gets details about the specified streaming job. +// Parameters: +// resourceGroupName - the name of the resource group that contains the resource. 
You can obtain this value +// from the Azure Resource Manager API or the portal. +// jobName - the name of the streaming job. +// expand - the $expand OData query parameter. This is a comma-separated list of additional streaming job +// properties to include in the response, beyond the default set returned when this parameter is absent. The +// default set is all streaming job properties other than 'inputs', 'transformation', 'outputs', and +// 'functions'. +func (client StreamingJobsClient) Get(ctx context.Context, resourceGroupName string, jobName string, expand string) (result StreamingJob, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.Get") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.GetPreparer(ctx, resourceGroupName, jobName, expand) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Get", nil, "Failure preparing request") + return + } + + resp, err := client.GetSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Get", resp, "Failure sending request") + return + } + + result, err = client.GetResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Get", resp, "Failure responding to request") + } + + return +} + +// GetPreparer prepares the Get request. 
+func (client StreamingJobsClient) GetPreparer(ctx context.Context, resourceGroupName string, jobName string, expand string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(expand) > 0 { + queryParameters["$expand"] = autorest.Encode("query", expand) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetSender sends the Get request. The method will close the +// http.Response Body if it receives an error. +func (client StreamingJobsClient) GetSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// GetResponder handles the response to the Get request. The method always +// closes the http.Response Body. +func (client StreamingJobsClient) GetResponder(resp *http.Response) (result StreamingJob, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// List lists all of the streaming jobs in the given subscription. +// Parameters: +// expand - the $expand OData query parameter. 
This is a comma-separated list of additional streaming job +// properties to include in the response, beyond the default set returned when this parameter is absent. The +// default set is all streaming job properties other than 'inputs', 'transformation', 'outputs', and +// 'functions'. +func (client StreamingJobsClient) List(ctx context.Context, expand string) (result StreamingJobListResultPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.List") + defer func() { + sc := -1 + if result.sjlr.Response.Response != nil { + sc = result.sjlr.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.fn = client.listNextResults + req, err := client.ListPreparer(ctx, expand) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "List", nil, "Failure preparing request") + return + } + + resp, err := client.ListSender(req) + if err != nil { + result.sjlr.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "List", resp, "Failure sending request") + return + } + + result.sjlr, err = client.ListResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "List", resp, "Failure responding to request") + } + + return +} + +// ListPreparer prepares the List request. 
+func (client StreamingJobsClient) ListPreparer(ctx context.Context, expand string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(expand) > 0 { + queryParameters["$expand"] = autorest.Encode("query", expand) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/streamingjobs", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListSender sends the List request. The method will close the +// http.Response Body if it receives an error. +func (client StreamingJobsClient) ListSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// ListResponder handles the response to the List request. The method always +// closes the http.Response Body. +func (client StreamingJobsClient) ListResponder(resp *http.Response) (result StreamingJobListResult, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listNextResults retrieves the next set of results, if any. 
+func (client StreamingJobsClient) listNextResults(ctx context.Context, lastResults StreamingJobListResult) (result StreamingJobListResult, err error) { + req, err := lastResults.streamingJobListResultPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "listNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "listNextResults", resp, "Failure sending next results request") + } + result, err = client.ListResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "listNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListComplete enumerates all values, automatically crossing page boundaries as required. +func (client StreamingJobsClient) ListComplete(ctx context.Context, expand string) (result StreamingJobListResultIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.List") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.List(ctx, expand) + return +} + +// ListByResourceGroup lists all of the streaming jobs in the specified resource group. +// Parameters: +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. +// expand - the $expand OData query parameter. This is a comma-separated list of additional streaming job +// properties to include in the response, beyond the default set returned when this parameter is absent. 
The +// default set is all streaming job properties other than 'inputs', 'transformation', 'outputs', and +// 'functions'. +func (client StreamingJobsClient) ListByResourceGroup(ctx context.Context, resourceGroupName string, expand string) (result StreamingJobListResultPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.ListByResourceGroup") + defer func() { + sc := -1 + if result.sjlr.Response.Response != nil { + sc = result.sjlr.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.fn = client.listByResourceGroupNextResults + req, err := client.ListByResourceGroupPreparer(ctx, resourceGroupName, expand) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "ListByResourceGroup", nil, "Failure preparing request") + return + } + + resp, err := client.ListByResourceGroupSender(req) + if err != nil { + result.sjlr.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "ListByResourceGroup", resp, "Failure sending request") + return + } + + result.sjlr, err = client.ListByResourceGroupResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "ListByResourceGroup", resp, "Failure responding to request") + } + + return +} + +// ListByResourceGroupPreparer prepares the ListByResourceGroup request. 
+func (client StreamingJobsClient) ListByResourceGroupPreparer(ctx context.Context, resourceGroupName string, expand string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(expand) > 0 { + queryParameters["$expand"] = autorest.Encode("query", expand) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListByResourceGroupSender sends the ListByResourceGroup request. The method will close the +// http.Response Body if it receives an error. +func (client StreamingJobsClient) ListByResourceGroupSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// ListByResourceGroupResponder handles the response to the ListByResourceGroup request. The method always +// closes the http.Response Body. +func (client StreamingJobsClient) ListByResourceGroupResponder(resp *http.Response) (result StreamingJobListResult, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listByResourceGroupNextResults retrieves the next set of results, if any. 
+func (client StreamingJobsClient) listByResourceGroupNextResults(ctx context.Context, lastResults StreamingJobListResult) (result StreamingJobListResult, err error) { + req, err := lastResults.streamingJobListResultPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "listByResourceGroupNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListByResourceGroupSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "listByResourceGroupNextResults", resp, "Failure sending next results request") + } + result, err = client.ListByResourceGroupResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "listByResourceGroupNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListByResourceGroupComplete enumerates all values, automatically crossing page boundaries as required. +func (client StreamingJobsClient) ListByResourceGroupComplete(ctx context.Context, resourceGroupName string, expand string) (result StreamingJobListResultIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.ListByResourceGroup") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListByResourceGroup(ctx, resourceGroupName, expand) + return +} + +// Start starts a streaming job. Once a job is started it will start processing input events and produce output. +// Parameters: +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. 
+// jobName - the name of the streaming job. +// startJobParameters - parameters applicable to a start streaming job operation. +func (client StreamingJobsClient) Start(ctx context.Context, resourceGroupName string, jobName string, startJobParameters *StartStreamingJobParameters) (result StreamingJobsStartFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.Start") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.StartPreparer(ctx, resourceGroupName, jobName, startJobParameters) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Start", nil, "Failure preparing request") + return + } + + result, err = client.StartSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Start", result.Response(), "Failure sending request") + return + } + + return +} + +// StartPreparer prepares the Start request. 
+func (client StreamingJobsClient) StartPreparer(ctx context.Context, resourceGroupName string, jobName string, startJobParameters *StartStreamingJobParameters) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start", pathParameters), + autorest.WithQueryParameters(queryParameters)) + if startJobParameters != nil { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithJSON(startJobParameters)) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// StartSender sends the Start request. The method will close the +// http.Response Body if it receives an error. +func (client StreamingJobsClient) StartSender(req *http.Request) (future StreamingJobsStartFuture, err error) { + var resp *http.Response + resp, err = autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// StartResponder handles the response to the Start request. The method always +// closes the http.Response Body. 
+func (client StreamingJobsClient) StartResponder(resp *http.Response) (result autorest.Response, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), + autorest.ByClosing()) + result.Response = resp + return +} + +// Stop stops a running streaming job. This will cause a running streaming job to stop processing input events and +// producing output. +// Parameters: +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. +// jobName - the name of the streaming job. +func (client StreamingJobsClient) Stop(ctx context.Context, resourceGroupName string, jobName string) (result StreamingJobsStopFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.Stop") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.StopPreparer(ctx, resourceGroupName, jobName) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Stop", nil, "Failure preparing request") + return + } + + result, err = client.StopSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Stop", result.Response(), "Failure sending request") + return + } + + return +} + +// StopPreparer prepares the Stop request. 
+func (client StreamingJobsClient) StopPreparer(ctx context.Context, resourceGroupName string, jobName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// StopSender sends the Stop request. The method will close the +// http.Response Body if it receives an error. +func (client StreamingJobsClient) StopSender(req *http.Request) (future StreamingJobsStopFuture, err error) { + var resp *http.Response + resp, err = autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// StopResponder handles the response to the Stop request. The method always +// closes the http.Response Body. +func (client StreamingJobsClient) StopResponder(resp *http.Response) (result autorest.Response, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), + autorest.ByClosing()) + result.Response = resp + return +} + +// Update updates an existing streaming job. This can be used to partially update (ie. update one or two properties) a +// streaming job without affecting the rest the job definition. +// Parameters: +// streamingJob - a streaming job object. 
The properties specified here will overwrite the corresponding +// properties in the existing streaming job (ie. Those properties will be updated). Any properties that are set +// to null here will mean that the corresponding property in the existing input will remain the same and not +// change as a result of this PATCH operation. +// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value +// from the Azure Resource Manager API or the portal. +// jobName - the name of the streaming job. +// ifMatch - the ETag of the streaming job. Omit this value to always overwrite the current record set. Specify +// the last-seen ETag value to prevent accidentally overwriting concurrent changes. +func (client StreamingJobsClient) Update(ctx context.Context, streamingJob StreamingJob, resourceGroupName string, jobName string, ifMatch string) (result StreamingJob, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.Update") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.UpdatePreparer(ctx, streamingJob, resourceGroupName, jobName, ifMatch) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Update", nil, "Failure preparing request") + return + } + + resp, err := client.UpdateSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Update", resp, "Failure sending request") + return + } + + result, err = client.UpdateResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Update", resp, "Failure responding to request") + } + + return +} + +// UpdatePreparer prepares the Update request. 
+func (client StreamingJobsClient) UpdatePreparer(ctx context.Context, streamingJob StreamingJob, resourceGroupName string, jobName string, ifMatch string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2016-03-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPatch(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}", pathParameters), + autorest.WithJSON(streamingJob), + autorest.WithQueryParameters(queryParameters)) + if len(ifMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-Match", autorest.String(ifMatch))) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// UpdateSender sends the Update request. The method will close the +// http.Response Body if it receives an error. +func (client StreamingJobsClient) UpdateSender(req *http.Request) (*http.Response, error) { + return autorest.SendWithSender(client, req, + azure.DoRetryWithRegistration(client.Client)) +} + +// UpdateResponder handles the response to the Update request. The method always +// closes the http.Response Body. 
+func (client StreamingJobsClient) UpdateResponder(resp *http.Response) (result StreamingJob, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/subscriptions.go b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/subscriptions.go new file mode 100644 index 000000000000..963bc3f4c577 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics/subscriptions.go @@ -0,0 +1,117 @@ +package streamanalytics + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/tracing" + "net/http" +) + +// SubscriptionsClient is the stream Analytics Client +type SubscriptionsClient struct { + BaseClient +} + +// NewSubscriptionsClient creates an instance of the SubscriptionsClient client. 
// Code generated by Microsoft (R) AutoRest Code Generator — do not hand-edit;
// changes will be lost when the SDK is regenerated/re-vendored.

// NewSubscriptionsClient creates an instance of the SubscriptionsClient client
// targeting the public Azure cloud (DefaultBaseURI).
func NewSubscriptionsClient(subscriptionID string) SubscriptionsClient {
	return NewSubscriptionsClientWithBaseURI(DefaultBaseURI, subscriptionID)
}

// NewSubscriptionsClientWithBaseURI creates an instance of the SubscriptionsClient client.
func NewSubscriptionsClientWithBaseURI(baseURI string, subscriptionID string) SubscriptionsClient {
	return SubscriptionsClient{NewWithBaseURI(baseURI, subscriptionID)}
}

// ListQuotas retrieves the subscription's current quota information in a particular region.
// Parameters:
// location - the region in which to retrieve the subscription's quota information. You can find out which
// regions Azure Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/
func (client SubscriptionsClient) ListQuotas(ctx context.Context, location string) (result SubscriptionQuotasListResult, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/SubscriptionsClient.ListQuotas")
		defer func() {
			// -1 is the sentinel status code for "no HTTP response was received".
			sc := -1
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	req, err := client.ListQuotasPreparer(ctx, location)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.SubscriptionsClient", "ListQuotas", nil, "Failure preparing request")
		return
	}

	resp, err := client.ListQuotasSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "streamanalytics.SubscriptionsClient", "ListQuotas", resp, "Failure sending request")
		return
	}

	result, err = client.ListQuotasResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.SubscriptionsClient", "ListQuotas", resp, "Failure responding to request")
	}

	return
}

// ListQuotasPreparer prepares the ListQuotas request.
func (client SubscriptionsClient) ListQuotasPreparer(ctx context.Context, location string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"location":       autorest.Encode("path", location),
		"subscriptionId": autorest.Encode("path", client.SubscriptionID),
	}

	const APIVersion = "2016-03-01"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/quotas", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// ListQuotasSender sends the ListQuotas request. The method will close the
// http.Response Body if it receives an error.
func (client SubscriptionsClient) ListQuotasSender(req *http.Request) (*http.Response, error) {
	return autorest.SendWithSender(client, req,
		azure.DoRetryWithRegistration(client.Client))
}

// ListQuotasResponder handles the response to the ListQuotas request. The method always
// closes the http.Response Body.
func (client SubscriptionsClient) ListQuotasResponder(resp *http.Response) (result SubscriptionQuotasListResult, err error) {
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}

package streamanalytics

// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
	"context"
	"github.com/Azure/go-autorest/autorest"
	"github.com/Azure/go-autorest/autorest/azure"
	"github.com/Azure/go-autorest/tracing"
	"net/http"
)

// TransformationsClient is the stream Analytics Client
type TransformationsClient struct {
	BaseClient
}

// NewTransformationsClient creates an instance of the TransformationsClient client.
func NewTransformationsClient(subscriptionID string) TransformationsClient {
	return NewTransformationsClientWithBaseURI(DefaultBaseURI, subscriptionID)
}

// NewTransformationsClientWithBaseURI creates an instance of the TransformationsClient client.
func NewTransformationsClientWithBaseURI(baseURI string, subscriptionID string) TransformationsClient {
	return TransformationsClient{NewWithBaseURI(baseURI, subscriptionID)}
}

// CreateOrReplace creates a transformation or replaces an already existing transformation under an existing streaming
// job.
// Parameters:
// transformation - the definition of the transformation that will be used to create a new transformation or
// replace the existing one under the streaming job.
// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value
// from the Azure Resource Manager API or the portal.
// jobName - the name of the streaming job.
// transformationName - the name of the transformation.
// ifMatch - the ETag of the transformation. Omit this value to always overwrite the current transformation.
// Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes.
// ifNoneMatch - set to '*' to allow a new transformation to be created, but to prevent updating an existing
// transformation. Other values will result in a 412 Pre-condition Failed response.
func (client TransformationsClient) CreateOrReplace(ctx context.Context, transformation Transformation, resourceGroupName string, jobName string, transformationName string, ifMatch string, ifNoneMatch string) (result Transformation, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/TransformationsClient.CreateOrReplace")
		defer func() {
			// -1 is the sentinel status code for "no HTTP response was received".
			sc := -1
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	req, err := client.CreateOrReplacePreparer(ctx, transformation, resourceGroupName, jobName, transformationName, ifMatch, ifNoneMatch)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "CreateOrReplace", nil, "Failure preparing request")
		return
	}

	resp, err := client.CreateOrReplaceSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "CreateOrReplace", resp, "Failure sending request")
		return
	}

	result, err = client.CreateOrReplaceResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "CreateOrReplace", resp, "Failure responding to request")
	}

	return
}

// CreateOrReplacePreparer prepares the CreateOrReplace request.
func (client TransformationsClient) CreateOrReplacePreparer(ctx context.Context, transformation Transformation, resourceGroupName string, jobName string, transformationName string, ifMatch string, ifNoneMatch string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"jobName":            autorest.Encode("path", jobName),
		"resourceGroupName":  autorest.Encode("path", resourceGroupName),
		"subscriptionId":     autorest.Encode("path", client.SubscriptionID),
		"transformationName": autorest.Encode("path", transformationName),
	}

	const APIVersion = "2016-03-01"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsContentType("application/json; charset=utf-8"),
		autorest.AsPut(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}", pathParameters),
		autorest.WithJSON(transformation),
		autorest.WithQueryParameters(queryParameters))
	// The concurrency-control headers are only attached when the caller supplied them.
	if len(ifMatch) > 0 {
		preparer = autorest.DecoratePreparer(preparer,
			autorest.WithHeader("If-Match", autorest.String(ifMatch)))
	}
	if len(ifNoneMatch) > 0 {
		preparer = autorest.DecoratePreparer(preparer,
			autorest.WithHeader("If-None-Match", autorest.String(ifNoneMatch)))
	}
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// CreateOrReplaceSender sends the CreateOrReplace request. The method will close the
// http.Response Body if it receives an error.
func (client TransformationsClient) CreateOrReplaceSender(req *http.Request) (*http.Response, error) {
	return autorest.SendWithSender(client, req,
		azure.DoRetryWithRegistration(client.Client))
}

// CreateOrReplaceResponder handles the response to the CreateOrReplace request. The method always
// closes the http.Response Body.
func (client TransformationsClient) CreateOrReplaceResponder(resp *http.Response) (result Transformation, err error) {
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}

// Get gets details about the specified transformation.
// Parameters:
// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value
// from the Azure Resource Manager API or the portal.
// jobName - the name of the streaming job.
// transformationName - the name of the transformation.
func (client TransformationsClient) Get(ctx context.Context, resourceGroupName string, jobName string, transformationName string) (result Transformation, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/TransformationsClient.Get")
		defer func() {
			sc := -1
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	req, err := client.GetPreparer(ctx, resourceGroupName, jobName, transformationName)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "Get", nil, "Failure preparing request")
		return
	}

	resp, err := client.GetSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "Get", resp, "Failure sending request")
		return
	}

	result, err = client.GetResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "Get", resp, "Failure responding to request")
	}

	return
}

// GetPreparer prepares the Get request.
func (client TransformationsClient) GetPreparer(ctx context.Context, resourceGroupName string, jobName string, transformationName string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"jobName":            autorest.Encode("path", jobName),
		"resourceGroupName":  autorest.Encode("path", resourceGroupName),
		"subscriptionId":     autorest.Encode("path", client.SubscriptionID),
		"transformationName": autorest.Encode("path", transformationName),
	}

	const APIVersion = "2016-03-01"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client TransformationsClient) GetSender(req *http.Request) (*http.Response, error) {
	return autorest.SendWithSender(client, req,
		azure.DoRetryWithRegistration(client.Client))
}

// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client TransformationsClient) GetResponder(resp *http.Response) (result Transformation, err error) {
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}

// Update updates an existing transformation under an existing streaming job. This can be used to partially update (ie.
// update one or two properties) a transformation without affecting the rest the job or transformation definition.
// Parameters:
// transformation - a Transformation object. The properties specified here will overwrite the corresponding
// properties in the existing transformation (ie. Those properties will be updated). Any properties that are
// set to null here will mean that the corresponding property in the existing transformation will remain the
// same and not change as a result of this PATCH operation.
// resourceGroupName - the name of the resource group that contains the resource. You can obtain this value
// from the Azure Resource Manager API or the portal.
// jobName - the name of the streaming job.
// transformationName - the name of the transformation.
// ifMatch - the ETag of the transformation. Omit this value to always overwrite the current transformation.
// Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes.
func (client TransformationsClient) Update(ctx context.Context, transformation Transformation, resourceGroupName string, jobName string, transformationName string, ifMatch string) (result Transformation, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/TransformationsClient.Update")
		defer func() {
			sc := -1
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	req, err := client.UpdatePreparer(ctx, transformation, resourceGroupName, jobName, transformationName, ifMatch)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "Update", nil, "Failure preparing request")
		return
	}

	resp, err := client.UpdateSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "Update", resp, "Failure sending request")
		return
	}

	result, err = client.UpdateResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "Update", resp, "Failure responding to request")
	}

	return
}

// UpdatePreparer prepares the Update request.
func (client TransformationsClient) UpdatePreparer(ctx context.Context, transformation Transformation, resourceGroupName string, jobName string, transformationName string, ifMatch string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"jobName":            autorest.Encode("path", jobName),
		"resourceGroupName":  autorest.Encode("path", resourceGroupName),
		"subscriptionId":     autorest.Encode("path", client.SubscriptionID),
		"transformationName": autorest.Encode("path", transformationName),
	}

	const APIVersion = "2016-03-01"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsContentType("application/json; charset=utf-8"),
		autorest.AsPatch(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}", pathParameters),
		autorest.WithJSON(transformation),
		autorest.WithQueryParameters(queryParameters))
	if len(ifMatch) > 0 {
		preparer = autorest.DecoratePreparer(preparer,
			autorest.WithHeader("If-Match", autorest.String(ifMatch)))
	}
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// UpdateSender sends the Update request. The method will close the
// http.Response Body if it receives an error.
func (client TransformationsClient) UpdateSender(req *http.Request) (*http.Response, error) {
	return autorest.SendWithSender(client, req,
		azure.DoRetryWithRegistration(client.Client))
}
// UpdateResponder handles the response to the Update request. The method always
// closes the http.Response Body.
func (client TransformationsClient) UpdateResponder(resp *http.Response) (result Transformation, err error) {
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}

package streamanalytics

import "github.com/Azure/azure-sdk-for-go/version"

// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.

// UserAgent returns the UserAgent string to use when sending http.Requests.
func UserAgent() string {
	return "Azure-SDK-For-Go/" + version.Number + " streamanalytics/2016-03-01"
}

// Version returns the semantic version (see http://semver.org) of the client.
func Version() string {
	return version.Number
}
azurerm/helpers/validate/stream_analytics_test.go diff --git a/azurerm/helpers/azure/stream_analytics_output.go b/azurerm/helpers/azure/stream_analytics_output.go new file mode 100644 index 000000000000..a91b427faa16 --- /dev/null +++ b/azurerm/helpers/azure/stream_analytics_output.go @@ -0,0 +1,165 @@ +package azure + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" + + "github.com/hashicorp/terraform/helper/validation" + + "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics" + "github.com/hashicorp/terraform/helper/schema" +) + +func SchemaStreamAnalyticsOutputSerialization() *schema.Schema { + return &schema.Schema{ + Type: schema.TypeList, + Required: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "type": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + string(streamanalytics.TypeAvro), + string(streamanalytics.TypeCsv), + string(streamanalytics.TypeJSON), + }, false), + }, + + "field_delimiter": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{ + " ", + ",", + " ", + "|", + ";", + }, false), + }, + + "encoding": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{ + string(streamanalytics.UTF8), + }, false), + }, + + "format": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{ + string(streamanalytics.Array), + string(streamanalytics.LineSeparated), + }, false), + }, + }, + }, + } +} + +func ExpandStreamAnalyticsOutputSerialization(input []interface{}) (streamanalytics.BasicSerialization, error) { + v := input[0].(map[string]interface{}) + + outputType := streamanalytics.Type(v["type"].(string)) + encoding := v["encoding"].(string) + fieldDelimiter := v["field_delimiter"].(string) + format := v["format"].(string) + + switch outputType { + case 
streamanalytics.TypeAvro: + if encoding != "" { + return nil, fmt.Errorf("`encoding` cannot be set when `type` is set to `Avro`") + } + if fieldDelimiter != "" { + return nil, fmt.Errorf("`field_delimiter` cannot be set when `type` is set to `Avro`") + } + if format != "" { + return nil, fmt.Errorf("`format` cannot be set when `type` is set to `Avro`") + } + return streamanalytics.AvroSerialization{ + Type: streamanalytics.TypeAvro, + Properties: map[string]interface{}{}, + }, nil + + case streamanalytics.TypeCsv: + if encoding == "" { + return nil, fmt.Errorf("`encoding` must be specified when `type` is set to `Csv`") + } + if fieldDelimiter == "" { + return nil, fmt.Errorf("`field_delimiter` must be set when `type` is set to `Csv`") + } + if format != "" { + return nil, fmt.Errorf("`format` cannot be set when `type` is set to `Csv`") + } + return streamanalytics.CsvSerialization{ + Type: streamanalytics.TypeCsv, + CsvSerializationProperties: &streamanalytics.CsvSerializationProperties{ + Encoding: streamanalytics.Encoding(encoding), + FieldDelimiter: utils.String(fieldDelimiter), + }, + }, nil + + case streamanalytics.TypeJSON: + if encoding == "" { + return nil, fmt.Errorf("`encoding` must be specified when `type` is set to `Json`") + } + if format == "" { + return nil, fmt.Errorf("`format` must be specified when `type` is set to `Json`") + } + if fieldDelimiter != "" { + return nil, fmt.Errorf("`field_delimiter` cannot be set when `type` is set to `Json`") + } + + return streamanalytics.JSONSerialization{ + Type: streamanalytics.TypeJSON, + JSONSerializationProperties: &streamanalytics.JSONSerializationProperties{ + Encoding: streamanalytics.Encoding(encoding), + }, + }, nil + } + + return nil, fmt.Errorf("Unsupported Output Type %q", outputType) +} + +func FlattenStreamAnalyticsOutputSerialization(input streamanalytics.BasicSerialization) []interface{} { + var encoding string + var outputType string + var fieldDelimiter string + var format string + + if _, ok 
:= input.AsAvroSerialization(); ok { + outputType = string(streamanalytics.TypeAvro) + } + + if v, ok := input.AsCsvSerialization(); ok { + if props := v.CsvSerializationProperties; props != nil { + encoding = string(props.Encoding) + if props.FieldDelimiter != nil { + fieldDelimiter = *props.FieldDelimiter + } + } + + outputType = string(streamanalytics.TypeCsv) + } + + if v, ok := input.AsJSONSerialization(); ok { + if props := v.JSONSerializationProperties; props != nil { + encoding = string(props.Encoding) + format = string(props.Format) + } + + outputType = string(streamanalytics.TypeJSON) + } + return []interface{}{ + map[string]interface{}{ + "encoding": encoding, + "type": outputType, + "format": format, + "field_delimiter": fieldDelimiter, + }, + } +} diff --git a/azurerm/helpers/azure/stream_analytics_stream_input.go b/azurerm/helpers/azure/stream_analytics_stream_input.go new file mode 100644 index 000000000000..4074b3c0fa62 --- /dev/null +++ b/azurerm/helpers/azure/stream_analytics_stream_input.go @@ -0,0 +1,134 @@ +package azure + +import ( + "fmt" + + "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics" + "github.com/hashicorp/terraform/helper/schema" + "github.com/hashicorp/terraform/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func SchemaStreamAnalyticsStreamInputSerialization() *schema.Schema { + return &schema.Schema{ + Type: schema.TypeList, + Required: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "type": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + string(streamanalytics.TypeAvro), + string(streamanalytics.TypeCsv), + string(streamanalytics.TypeJSON), + }, false), + }, + + "field_delimiter": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{ + " ", + ",", + " ", + "|", + ";", + }, false), + }, + + "encoding": { 
+ Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{ + string(streamanalytics.UTF8), + }, false), + }, + }, + }, + } +} + +func ExpandStreamAnalyticsStreamInputSerialization(input []interface{}) (streamanalytics.BasicSerialization, error) { + v := input[0].(map[string]interface{}) + + inputType := streamanalytics.Type(v["type"].(string)) + encoding := v["encoding"].(string) + fieldDelimiter := v["field_delimiter"].(string) + + switch inputType { + case streamanalytics.TypeAvro: + return streamanalytics.AvroSerialization{ + Type: streamanalytics.TypeAvro, + Properties: map[string]interface{}{}, + }, nil + + case streamanalytics.TypeCsv: + if encoding == "" { + return nil, fmt.Errorf("`encoding` must be specified when `type` is set to `Csv`") + } + if fieldDelimiter == "" { + return nil, fmt.Errorf("`field_delimiter` must be set when `type` is set to `Csv`") + } + return streamanalytics.CsvSerialization{ + Type: streamanalytics.TypeCsv, + CsvSerializationProperties: &streamanalytics.CsvSerializationProperties{ + Encoding: streamanalytics.Encoding(encoding), + FieldDelimiter: utils.String(fieldDelimiter), + }, + }, nil + + case streamanalytics.TypeJSON: + if encoding == "" { + return nil, fmt.Errorf("`encoding` must be specified when `type` is set to `Json`") + } + + return streamanalytics.JSONSerialization{ + Type: streamanalytics.TypeJSON, + JSONSerializationProperties: &streamanalytics.JSONSerializationProperties{ + Encoding: streamanalytics.Encoding(encoding), + }, + }, nil + } + + return nil, fmt.Errorf("Unsupported Input Type %q", inputType) +} + +func FlattenStreamAnalyticsStreamInputSerialization(input streamanalytics.BasicSerialization) []interface{} { + var encoding string + var fieldDelimiter string + var inputType string + + if _, ok := input.AsAvroSerialization(); ok { + inputType = string(streamanalytics.TypeAvro) + } + + if v, ok := input.AsCsvSerialization(); ok { + if props := v.CsvSerializationProperties; 
props != nil { + encoding = string(props.Encoding) + + if props.FieldDelimiter != nil { + fieldDelimiter = *props.FieldDelimiter + } + } + + inputType = string(streamanalytics.TypeCsv) + } + + if v, ok := input.AsJSONSerialization(); ok { + if props := v.JSONSerializationProperties; props != nil { + encoding = string(props.Encoding) + } + + inputType = string(streamanalytics.TypeJSON) + } + + return []interface{}{ + map[string]interface{}{ + "encoding": encoding, + "type": inputType, + "field_delimiter": fieldDelimiter, + }, + } +} diff --git a/azurerm/helpers/validate/stream_analytics.go b/azurerm/helpers/validate/stream_analytics.go new file mode 100644 index 000000000000..adc19794a417 --- /dev/null +++ b/azurerm/helpers/validate/stream_analytics.go @@ -0,0 +1,28 @@ +package validate + +import "fmt" + +func StreamAnalyticsJobStreamingUnits(i interface{}, k string) (w []string, es []error) { + v, ok := i.(int) + if !ok { + es = append(es, fmt.Errorf("expected type of %s to be int", k)) + return + } + + // Property 'streamingUnits' value '5' is not in the acceptable set: '1','3','6','12', and multiples of 6 up to your quota" + if v == 1 || v == 3 { + return + } + + if v < 1 || v > 120 { + es = append(es, fmt.Errorf("expected %s to be in the range (1 - 120), got %d", k, v)) + return + } + + if v%6 != 0 { + es = append(es, fmt.Errorf("expected %s to be divisible by 6, got %d", k, v)) + return + } + + return +} diff --git a/azurerm/helpers/validate/stream_analytics_test.go b/azurerm/helpers/validate/stream_analytics_test.go new file mode 100644 index 000000000000..f53fd548322e --- /dev/null +++ b/azurerm/helpers/validate/stream_analytics_test.go @@ -0,0 +1,34 @@ +package validate + +import ( + "testing" +) + +func TestStreamAnalyticsJobStreamingUnits(t *testing.T) { + cases := map[int]bool{ + 0: false, + 1: true, + 2: false, + 3: true, + 4: false, + 5: false, + 6: true, + 7: false, + 8: false, + 9: false, + 10: false, + 11: false, + 12: true, + 18: true, + 24: true, + 
30: true, + } + for i, shouldBeValid := range cases { + _, errors := StreamAnalyticsJobStreamingUnits(i, "streaming_units") + + isValid := len(errors) == 0 + if shouldBeValid != isValid { + t.Fatalf("Expected %d to be %t but got %t", i, shouldBeValid, isValid) + } + } +} From b726b0bc6ca9d4516b4cf9f6c6386b02af0d530b Mon Sep 17 00:00:00 2001 From: tombuildsstuff Date: Mon, 15 Apr 2019 22:17:32 +0200 Subject: [PATCH 3/4] New Resource/Data Source: `azurerm_stream_analytics_job` --- azurerm/data_source_stream_analytics_job.go | 130 ++++++++ .../data_source_stream_analytics_job_test.go | 40 +++ azurerm/provider.go | 2 + azurerm/resource_arm_stream_analytics_job.go | 291 ++++++++++++++++++ .../resource_arm_stream_analytics_job_test.go | 223 ++++++++++++++ website/azurerm.erb | 13 + .../docs/d/stream_analytics_job.html.markdown | 56 ++++ .../docs/r/stream_analytics_job.html.markdown | 83 +++++ 8 files changed, 838 insertions(+) create mode 100644 azurerm/data_source_stream_analytics_job.go create mode 100644 azurerm/data_source_stream_analytics_job_test.go create mode 100644 azurerm/resource_arm_stream_analytics_job.go create mode 100644 azurerm/resource_arm_stream_analytics_job_test.go create mode 100644 website/docs/d/stream_analytics_job.html.markdown create mode 100644 website/docs/r/stream_analytics_job.html.markdown diff --git a/azurerm/data_source_stream_analytics_job.go b/azurerm/data_source_stream_analytics_job.go new file mode 100644 index 000000000000..a4a3a9fd43dd --- /dev/null +++ b/azurerm/data_source_stream_analytics_job.go @@ -0,0 +1,130 @@ +package azurerm + +import ( + "fmt" + + "github.com/hashicorp/terraform/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func dataSourceArmStreamAnalyticsJob() *schema.Resource { + return &schema.Resource{ + Read: dataSourceArmStreamAnalyticsJobRead, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + + 
"resource_group_name": resourceGroupNameForDataSourceSchema(), + + "location": locationForDataSourceSchema(), + + "compatibility_level": { + Type: schema.TypeString, + Computed: true, + }, + + "data_locale": { + Type: schema.TypeString, + Computed: true, + }, + + "events_late_arrival_max_delay_in_seconds": { + Type: schema.TypeInt, + Computed: true, + }, + + "events_out_of_order_max_delay_in_seconds": { + Type: schema.TypeInt, + Computed: true, + }, + + "events_out_of_order_policy": { + Type: schema.TypeString, + Computed: true, + }, + + "job_id": { + Type: schema.TypeString, + Computed: true, + }, + + "output_error_policy": { + Type: schema.TypeString, + Computed: true, + }, + + "streaming_units": { + Type: schema.TypeInt, + Computed: true, + }, + + "transformation_query": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func dataSourceArmStreamAnalyticsJobRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*ArmClient).streamAnalyticsJobsClient + transformationsClient := meta.(*ArmClient).streamAnalyticsTransformationsClient + ctx := meta.(*ArmClient).StopContext + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + + resp, err := client.Get(ctx, resourceGroup, name, "") + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Stream Analytics Job %q was not found in Resource Group %q!", name, resourceGroup) + } + + return fmt.Errorf("Error retrieving Stream Analytics Job %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + transformation, err := transformationsClient.Get(ctx, resourceGroup, name, "Transformation") + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Transformation for Stream Analytics Job %q was not found in Resource Group %q!", name, resourceGroup) + } + + return fmt.Errorf("Error retrieving Transformation for Stream Analytics Job %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + 
d.SetId(*resp.ID) + + d.Set("name", name) + d.Set("resource_group_name", resourceGroup) + + if resp.Location != nil { + d.Set("location", azureRMNormalizeLocation(*resp.Location)) + } + + if props := resp.StreamingJobProperties; props != nil { + d.Set("compatibility_level", string(props.CompatibilityLevel)) + d.Set("data_locale", props.DataLocale) + if props.EventsLateArrivalMaxDelayInSeconds != nil { + d.Set("events_late_arrival_max_delay_in_seconds", int(*props.EventsLateArrivalMaxDelayInSeconds)) + } + if props.EventsOutOfOrderMaxDelayInSeconds != nil { + d.Set("events_out_of_order_max_delay_in_seconds", int(*props.EventsOutOfOrderMaxDelayInSeconds)) + } + d.Set("events_out_of_order_policy", string(props.EventsOutOfOrderPolicy)) + d.Set("job_id", props.JobID) + d.Set("output_error_policy", string(props.OutputErrorPolicy)) + } + + if props := transformation.TransformationProperties; props != nil { + if units := props.StreamingUnits; units != nil { + d.Set("streaming_units", int(*units)) + } + d.Set("transformation_query", props.Query) + } + + return nil +} diff --git a/azurerm/data_source_stream_analytics_job_test.go b/azurerm/data_source_stream_analytics_job_test.go new file mode 100644 index 000000000000..b5d1a9556006 --- /dev/null +++ b/azurerm/data_source_stream_analytics_job_test.go @@ -0,0 +1,40 @@ +package azurerm + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" +) + +func TestAccDataSourceAzureRMStreamAnalyticsJob_basic(t *testing.T) { + dataSourceName := "data.azurerm_stream_analytics_job.test" + ri := tf.AccRandTimeInt() + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testCheckAzureRMStreamAnalyticsJobDestroy, + Steps: []resource.TestStep{ + { + Config: testAccDataSourceAzureRMStreamAnalyticsJob_basic(ri, testLocation()), + Check: 
resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet(dataSourceName, "job_id"), + ), + }, + }, + }) +} + +func testAccDataSourceAzureRMStreamAnalyticsJob_basic(rInt int, location string) string { + config := testAccAzureRMStreamAnalyticsJob_basic(rInt, location) + return fmt.Sprintf(` +%s + +data "azurerm_stream_analytics_job" "test" { + name = "${azurerm_stream_analytics_job.test.name}" + resource_group_name = "${azurerm_stream_analytics_job.test.resource_group_name}" +} +`, config) +} diff --git a/azurerm/provider.go b/azurerm/provider.go index 7aa0cf949e56..95b8073c9a5f 100644 --- a/azurerm/provider.go +++ b/azurerm/provider.go @@ -159,6 +159,7 @@ func Provider() terraform.ResourceProvider { "azurerm_shared_image_version": dataSourceArmSharedImageVersion(), "azurerm_shared_image": dataSourceArmSharedImage(), "azurerm_snapshot": dataSourceArmSnapshot(), + "azurerm_stream_analytics_job": dataSourceArmStreamAnalyticsJob(), "azurerm_storage_account_sas": dataSourceArmStorageAccountSharedAccessSignature(), "azurerm_storage_account": dataSourceArmStorageAccount(), "azurerm_subnet": dataSourceArmSubnet(), @@ -380,6 +381,7 @@ func Provider() terraform.ResourceProvider { "azurerm_storage_queue": resourceArmStorageQueue(), "azurerm_storage_share": resourceArmStorageShare(), "azurerm_storage_table": resourceArmStorageTable(), + "azurerm_stream_analytics_job": resourceArmStreamAnalyticsJob(), "azurerm_subnet_network_security_group_association": resourceArmSubnetNetworkSecurityGroupAssociation(), "azurerm_subnet_route_table_association": resourceArmSubnetRouteTableAssociation(), "azurerm_subnet": resourceArmSubnet(), diff --git a/azurerm/resource_arm_stream_analytics_job.go b/azurerm/resource_arm_stream_analytics_job.go new file mode 100644 index 000000000000..93b386a4565b --- /dev/null +++ b/azurerm/resource_arm_stream_analytics_job.go @@ -0,0 +1,291 @@ +package azurerm + +import ( + "fmt" + "log" + + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + + "github.com/hashicorp/terraform/helper/validation" + + "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics" + + "github.com/hashicorp/terraform/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmStreamAnalyticsJob() *schema.Resource { + return &schema.Resource{ + Create: resourceArmStreamAnalyticsJobCreateUpdate, + Read: resourceArmStreamAnalyticsJobRead, + Update: resourceArmStreamAnalyticsJobCreateUpdate, + Delete: resourceArmStreamAnalyticsJobDelete, + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.NoEmptyStrings, + }, + + "resource_group_name": resourceGroupNameSchema(), + + "location": locationSchema(), + + "compatibility_level": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + // values found in the other API the portal uses + string(streamanalytics.OneFullStopZero), + "1.1", + // TODO: support for 1.2 when this is fixed: + // https://github.com/Azure/azure-rest-api-specs/issues/5604 + //"1.2", + }, false), + }, + + "data_locale": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.NoEmptyStrings, + }, + + "events_late_arrival_max_delay_in_seconds": { + Type: schema.TypeInt, + Required: true, + // portal allows for up to 20d 23h 59m 59s + ValidateFunc: validation.IntBetween(-1, 1814399), + }, + + "events_out_of_order_max_delay_in_seconds": { + Type: schema.TypeInt, + Required: true, + // portal allows for up to 9m 59s + ValidateFunc: validation.IntBetween(0, 599), + }, + + "events_out_of_order_policy": { + Type: schema.TypeString, + Required: true, + 
ValidateFunc: validation.StringInSlice([]string{ + string(streamanalytics.Adjust), + string(streamanalytics.Drop), + }, false), + }, + + "output_error_policy": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + string(streamanalytics.OutputErrorPolicyDrop), + string(streamanalytics.OutputErrorPolicyStop), + }, false), + }, + + "streaming_units": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validate.StreamAnalyticsJobStreamingUnits, + }, + + "transformation_query": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.NoEmptyStrings, + }, + + "job_id": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func resourceArmStreamAnalyticsJobCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*ArmClient).streamAnalyticsJobsClient + transformationsClient := meta.(*ArmClient).streamAnalyticsTransformationsClient + ctx := meta.(*ArmClient).StopContext + + log.Printf("[INFO] preparing arguments for Azure Stream Analytics Job creation.") + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + + if requireResourcesToBeImported && d.IsNewResource() { + existing, err := client.Get(ctx, resourceGroup, name, "") + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing Stream Analytics Job %q (Resource Group %q): %s", name, resourceGroup, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_stream_analytics_job", *existing.ID) + } + } + + compatibilityLevel := d.Get("compatibility_level").(string) + dataLocale := d.Get("data_locale").(string) + eventsLateArrivalMaxDelayInSeconds := d.Get("events_late_arrival_max_delay_in_seconds").(int) + eventsOutOfOrderMaxDelayInSeconds := d.Get("events_out_of_order_max_delay_in_seconds").(int) + eventsOutOfOrderPolicy := 
d.Get("events_out_of_order_policy").(string) + location := azureRMNormalizeLocation(d.Get("location").(string)) + outputErrorPolicy := d.Get("output_error_policy").(string) + streamingUnits := d.Get("streaming_units").(int) + transformationQuery := d.Get("transformation_query").(string) + + // needs to be defined inline for a Create but via a separate API for Update + transformation := streamanalytics.Transformation{ + Name: utils.String("Transformation"), + TransformationProperties: &streamanalytics.TransformationProperties{ + StreamingUnits: utils.Int32(int32(streamingUnits)), + Query: utils.String(transformationQuery), + }, + } + + props := streamanalytics.StreamingJob{ + Name: utils.String(name), + Location: utils.String(location), + StreamingJobProperties: &streamanalytics.StreamingJobProperties{ + Sku: &streamanalytics.Sku{ + Name: streamanalytics.Standard, + }, + CompatibilityLevel: streamanalytics.CompatibilityLevel(compatibilityLevel), + DataLocale: utils.String(dataLocale), + EventsLateArrivalMaxDelayInSeconds: utils.Int32(int32(eventsLateArrivalMaxDelayInSeconds)), + EventsOutOfOrderMaxDelayInSeconds: utils.Int32(int32(eventsOutOfOrderMaxDelayInSeconds)), + EventsOutOfOrderPolicy: streamanalytics.EventsOutOfOrderPolicy(eventsOutOfOrderPolicy), + OutputErrorPolicy: streamanalytics.OutputErrorPolicy(outputErrorPolicy), + }, + } + + if d.IsNewResource() { + props.StreamingJobProperties.Transformation = &transformation + + future, err := client.CreateOrReplace(ctx, props, resourceGroup, name, "", "") + if err != nil { + return fmt.Errorf("Error Creating Stream Analytics Job %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for creation of Stream Analytics Job %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + read, err := client.Get(ctx, resourceGroup, name, "") + if err != nil { + return err + } + if read.ID == nil { + return 
fmt.Errorf("Cannot read ID of Stream Analytics Job %q (Resource Group %q)", name, resourceGroup)
+		}
+
+		d.SetId(*read.ID)
+	} else {
+		if _, err := client.Update(ctx, props, resourceGroup, name, ""); err != nil {
+			return fmt.Errorf("Error Updating Stream Analytics Job %q (Resource Group %q): %+v", name, resourceGroup, err)
+		}
+
+		if _, err := transformationsClient.Update(ctx, transformation, resourceGroup, name, "Transformation", ""); err != nil {
+			return fmt.Errorf("Error Updating Transformation for Stream Analytics Job %q (Resource Group %q): %+v", name, resourceGroup, err)
+		}
+	}
+
+	return resourceArmStreamAnalyticsJobRead(d, meta)
+}
+
+// resourceArmStreamAnalyticsJobRead refreshes the job and its "Transformation"
+// child resource into state, removing the resource when either has been deleted.
+func resourceArmStreamAnalyticsJobRead(d *schema.ResourceData, meta interface{}) error {
+	client := meta.(*ArmClient).streamAnalyticsJobsClient
+	transformationsClient := meta.(*ArmClient).streamAnalyticsTransformationsClient
+	ctx := meta.(*ArmClient).StopContext
+
+	id, err := parseAzureResourceID(d.Id())
+	if err != nil {
+		return err
+	}
+	resourceGroup := id.ResourceGroup
+	name := id.Path["streamingjobs"]
+
+	resp, err := client.Get(ctx, resourceGroup, name, "")
+	if err != nil {
+		if utils.ResponseWasNotFound(resp.Response) {
+			log.Printf("[DEBUG] Stream Analytics Job %q was not found in Resource Group %q - removing from state!", name, resourceGroup)
+			d.SetId("")
+			return nil
+		}
+
+		return fmt.Errorf("Error retrieving Stream Analytics Job %q (Resource Group %q): %+v", name, resourceGroup, err)
+	}
+
+	transformation, err := transformationsClient.Get(ctx, resourceGroup, name, "Transformation")
+	if err != nil {
+		// NOTE: must check the transformation's own response - `resp` is the job's
+		// response and already succeeded above, so the not-found branch below would
+		// otherwise be unreachable when only the Transformation is missing.
+		if utils.ResponseWasNotFound(transformation.Response) {
+			log.Printf("[DEBUG] Transformation for Stream Analytics Job %q was not found in Resource Group %q - removing from state!", name, resourceGroup)
+			d.SetId("")
+			return nil
+		}
+
+		return fmt.Errorf("Error retrieving Transformation for Stream Analytics Job %q (Resource Group %q): %+v", name, resourceGroup, err)
+	}
+
+	d.Set("name", name)
+
d.Set("resource_group_name", resourceGroup) + + if resp.Location != nil { + d.Set("location", azureRMNormalizeLocation(*resp.Location)) + } + + if props := resp.StreamingJobProperties; props != nil { + d.Set("compatibility_level", string(props.CompatibilityLevel)) + d.Set("data_locale", props.DataLocale) + if props.EventsLateArrivalMaxDelayInSeconds != nil { + d.Set("events_late_arrival_max_delay_in_seconds", int(*props.EventsLateArrivalMaxDelayInSeconds)) + } + if props.EventsOutOfOrderMaxDelayInSeconds != nil { + d.Set("events_out_of_order_max_delay_in_seconds", int(*props.EventsOutOfOrderMaxDelayInSeconds)) + } + d.Set("events_out_of_order_policy", string(props.EventsOutOfOrderPolicy)) + d.Set("output_error_policy", string(props.OutputErrorPolicy)) + + // Computed + d.Set("job_id", props.JobID) + } + + if props := transformation.TransformationProperties; props != nil { + if units := props.StreamingUnits; units != nil { + d.Set("streaming_units", int(*units)) + } + d.Set("transformation_query", props.Query) + } + + return nil +} + +func resourceArmStreamAnalyticsJobDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*ArmClient).streamAnalyticsJobsClient + ctx := meta.(*ArmClient).StopContext + + id, err := parseAzureResourceID(d.Id()) + if err != nil { + return err + } + resourceGroup := id.ResourceGroup + name := id.Path["streamingjobs"] + + future, err := client.Delete(ctx, resourceGroup, name) + if err != nil { + return fmt.Errorf("Error deleting Stream Analytics Job %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for completion for Stream Analytics Job %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + return nil +} diff --git a/azurerm/resource_arm_stream_analytics_job_test.go b/azurerm/resource_arm_stream_analytics_job_test.go new file mode 100644 index 000000000000..4a4e49d52185 --- /dev/null +++ 
b/azurerm/resource_arm_stream_analytics_job_test.go @@ -0,0 +1,223 @@ +package azurerm + +import ( + "fmt" + "net/http" + "testing" + + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" +) + +func TestAccAzureRMStreamAnalyticsJob_basic(t *testing.T) { + resourceName := "azurerm_stream_analytics_job.test" + ri := tf.AccRandTimeInt() + location := testLocation() + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testCheckAzureRMStreamAnalyticsJobDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMStreamAnalyticsJob_basic(ri, location), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMStreamAnalyticsJobExists(resourceName), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccAzureRMStreamAnalyticsJob_requiresImport(t *testing.T) { + if !requireResourcesToBeImported { + t.Skip("Skipping since resources aren't required to be imported") + return + } + + resourceName := "azurerm_stream_analytics_job.test" + ri := tf.AccRandTimeInt() + location := testLocation() + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testCheckAzureRMStreamAnalyticsJobDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMStreamAnalyticsJob_basic(ri, location), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMStreamAnalyticsJobExists(resourceName), + ), + }, + { + Config: testAccAzureRMStreamAnalyticsJob_requiresImport(ri, location), + ExpectError: testRequiresImportError("azurerm_stream_analytics_job"), + }, + }, + }) +} + +func TestAccAzureRMStreamAnalyticsJob_update(t *testing.T) { + resourceName := "azurerm_stream_analytics_job.test" + ri := tf.AccRandTimeInt() + location := 
testLocation() + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testCheckAzureRMStreamAnalyticsJobDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMStreamAnalyticsJob_basic(ri, location), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMStreamAnalyticsJobExists(resourceName), + ), + }, + { + Config: testAccAzureRMStreamAnalyticsJob_updated(ri, location), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMStreamAnalyticsJobExists(resourceName), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testCheckAzureRMStreamAnalyticsJobExists(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + // Ensure we have enough information in state to look up in API + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + name := rs.Primary.Attributes["name"] + resourceGroup := rs.Primary.Attributes["resource_group_name"] + + conn := testAccProvider.Meta().(*ArmClient).streamAnalyticsJobsClient + ctx := testAccProvider.Meta().(*ArmClient).StopContext + resp, err := conn.Get(ctx, resourceGroup, name, "") + if err != nil { + return fmt.Errorf("Bad: Get on streamAnalyticsJobsClient: %+v", err) + } + + if resp.StatusCode == http.StatusNotFound { + return fmt.Errorf("Bad: Stream Analytics Job %q (resource group: %q) does not exist", name, resourceGroup) + } + + return nil + } +} + +func testCheckAzureRMStreamAnalyticsJobDestroy(s *terraform.State) error { + conn := testAccProvider.Meta().(*ArmClient).streamAnalyticsJobsClient + + for _, rs := range s.RootModule().Resources { + if rs.Type != "azurerm_stream_analytics_job" { + continue + } + + name := rs.Primary.Attributes["name"] + resourceGroup := rs.Primary.Attributes["resource_group_name"] + ctx := testAccProvider.Meta().(*ArmClient).StopContext + 
resp, err := conn.Get(ctx, resourceGroup, name, "") + if err != nil { + return nil + } + + if resp.StatusCode != http.StatusNotFound { + return fmt.Errorf("Stream Analytics Job still exists:\n%#v", resp.StreamingJobProperties) + } + } + + return nil +} + +func testAccAzureRMStreamAnalyticsJob_basic(rInt int, location string) string { + return fmt.Sprintf(` +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_stream_analytics_job" "test" { + name = "acctestjob-%d" + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" + compatibility_level = "1.0" + data_locale = "en-GB" + events_late_arrival_max_delay_in_seconds = 60 + events_out_of_order_max_delay_in_seconds = 50 + events_out_of_order_policy = "Adjust" + output_error_policy = "Drop" + streaming_units = 3 + + transformation_query = <azurerm_shared_image_version + > + azurerm_stream_analytics_job + + > azurerm_storage_account @@ -1360,6 +1364,15 @@ + > + Stream Analytics Resources + + + > Storage Resources