Skip to content

Commit

Permalink
Generated from d7031cbf36dccb5c63daddbe445657acd56a101f
Browse files Browse the repository at this point in the history
back
  • Loading branch information
SDK Automation committed Jun 22, 2020
1 parent deb77e6 commit da7f0aa
Show file tree
Hide file tree
Showing 4 changed files with 49 additions and 85 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -82,12 +82,12 @@ class WorkspaceSkuNameEnum(str, Enum):

class DataIngestionStatus(str, Enum):
    """The status of data ingestion for a Log Analytics workspace.

    The diff rendering had duplicated every member (old undocumented line plus
    new documented line); duplicate names in an ``Enum`` raise ``TypeError``
    at class-creation time, so only the documented set is kept here.
    Inheriting from ``str`` lets members compare equal to their wire values.
    """

    respect_quota = "RespectQuota"  #: Ingestion enabled following daily cap quota reset, or subscription enablement.
    force_on = "ForceOn"  #: Ingestion started following service setting change.
    force_off = "ForceOff"  #: Ingestion stopped following service setting change.
    over_quota = "OverQuota"  #: Reached daily cap quota, ingestion stopped.
    subscription_suspended = "SubscriptionSuspended"  #: Ingestion stopped following suspended subscription.
    approaching_quota = "ApproachingQuota"  #: 80% of daily cap quota reached.


class WorkspaceEntityStatus(str, Enum):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -528,8 +528,8 @@ class DataSource(ProxyResource):
:param properties: Required. The data source properties in raw JSON
format; each kind of data source has its own schema.
:type properties: object
:param e_tag: The ETag of the data source.
:type e_tag: str
:param etag: The ETag of the data source.
:type etag: str
:param kind: Required. Possible values include: 'WindowsEvent',
'WindowsPerformanceCounter', 'IISLogs', 'LinuxSyslog',
'LinuxSyslogCollection', 'LinuxPerformanceObject',
Expand Down Expand Up @@ -564,15 +564,15 @@ class DataSource(ProxyResource):
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'properties': {'key': 'properties', 'type': 'object'},
'e_tag': {'key': 'eTag', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
}

def __init__(self, **kwargs):
super(DataSource, self).__init__(**kwargs)
self.properties = kwargs.get('properties', None)
self.e_tag = kwargs.get('e_tag', None)
self.etag = kwargs.get('etag', None)
self.kind = kwargs.get('kind', None)
self.tags = kwargs.get('tags', None)

Expand Down Expand Up @@ -1086,8 +1086,8 @@ class SavedSearch(ProxyResource):
:ivar type: The type of the resource. Ex-
Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
:vartype type: str
:param e_tag: The ETag of the saved search.
:type e_tag: str
:param etag: The ETag of the saved search.
:type etag: str
:param category: Required. The category of the saved search. This helps
the user to find a saved search faster.
:type category: str
Expand Down Expand Up @@ -1123,7 +1123,7 @@ class SavedSearch(ProxyResource):
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'e_tag': {'key': 'eTag', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'category': {'key': 'properties.category', 'type': 'str'},
'display_name': {'key': 'properties.displayName', 'type': 'str'},
'query': {'key': 'properties.query', 'type': 'str'},
Expand All @@ -1135,7 +1135,7 @@ class SavedSearch(ProxyResource):

def __init__(self, **kwargs):
super(SavedSearch, self).__init__(**kwargs)
self.e_tag = kwargs.get('e_tag', None)
self.etag = kwargs.get('etag', None)
self.category = kwargs.get('category', None)
self.display_name = kwargs.get('display_name', None)
self.query = kwargs.get('query', None)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -528,8 +528,8 @@ class DataSource(ProxyResource):
:param properties: Required. The data source properties in raw JSON
format; each kind of data source has its own schema.
:type properties: object
:param e_tag: The ETag of the data source.
:type e_tag: str
:param etag: The ETag of the data source.
:type etag: str
:param kind: Required. Possible values include: 'WindowsEvent',
'WindowsPerformanceCounter', 'IISLogs', 'LinuxSyslog',
'LinuxSyslogCollection', 'LinuxPerformanceObject',
Expand Down Expand Up @@ -564,15 +564,15 @@ class DataSource(ProxyResource):
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'properties': {'key': 'properties', 'type': 'object'},
'e_tag': {'key': 'eTag', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
}

def __init__(self, *, properties, kind, e_tag: str=None, tags=None, **kwargs) -> None:
def __init__(self, *, properties, kind, etag: str=None, tags=None, **kwargs) -> None:
super(DataSource, self).__init__(**kwargs)
self.properties = properties
self.e_tag = e_tag
self.etag = etag
self.kind = kind
self.tags = tags

Expand Down Expand Up @@ -1086,8 +1086,8 @@ class SavedSearch(ProxyResource):
:ivar type: The type of the resource. Ex-
Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
:vartype type: str
:param e_tag: The ETag of the saved search.
:type e_tag: str
:param etag: The ETag of the saved search.
:type etag: str
:param category: Required. The category of the saved search. This helps
the user to find a saved search faster.
:type category: str
Expand Down Expand Up @@ -1123,7 +1123,7 @@ class SavedSearch(ProxyResource):
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'e_tag': {'key': 'eTag', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'category': {'key': 'properties.category', 'type': 'str'},
'display_name': {'key': 'properties.displayName', 'type': 'str'},
'query': {'key': 'properties.query', 'type': 'str'},
Expand All @@ -1133,9 +1133,9 @@ class SavedSearch(ProxyResource):
'tags': {'key': 'properties.tags', 'type': '[Tag]'},
}

def __init__(self, *, category: str, display_name: str, query: str, e_tag: str=None, function_alias: str=None, function_parameters: str=None, version: int=None, tags=None, **kwargs) -> None:
def __init__(self, *, category: str, display_name: str, query: str, etag: str=None, function_alias: str=None, function_parameters: str=None, version: int=None, tags=None, **kwargs) -> None:
super(SavedSearch, self).__init__(**kwargs)
self.e_tag = e_tag
self.etag = etag
self.category = category
self.display_name = display_name
self.query = query
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,6 @@

import uuid
from msrest.pipeline import ClientRawResponse
from msrest.polling import LROPoller, NoPolling
from msrestazure.polling.arm_polling import ARMPolling

from .. import models

Expand Down Expand Up @@ -112,9 +110,31 @@ def internal_paging(next_link=None):
return deserialized
list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataExports'}


def _create_or_update_initial(
def create_or_update(
self, resource_group_name, workspace_name, data_export_name, parameters, custom_headers=None, raw=False, **operation_config):
"""Create or update a data export.
:param resource_group_name: The name of the resource group. The name
is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param data_export_name: The data export rule name.
:type data_export_name: str
:param parameters: The parameters required to create or update a data
export.
:type parameters: ~azure.mgmt.loganalytics.models.DataExport
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: DataExport or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.loganalytics.models.DataExport or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`DataExportErrorResponseException<azure.mgmt.loganalytics.models.DataExportErrorResponseException>`
"""
# Construct URL
url = self.create_or_update.metadata['url']
path_format_arguments = {
Expand Down Expand Up @@ -151,7 +171,6 @@ def _create_or_update_initial(
raise models.DataExportErrorResponseException(self._deserialize, response)

deserialized = None

if response.status_code == 200:
deserialized = self._deserialize('DataExport', response)
if response.status_code == 201:
Expand All @@ -162,61 +181,6 @@ def _create_or_update_initial(
return client_raw_response

return deserialized

def create_or_update(
self, resource_group_name, workspace_name, data_export_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config):
"""Create or update a data export.
:param resource_group_name: The name of the resource group. The name
is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param data_export_name: The data export rule name.
:type data_export_name: str
:param parameters: The parameters required to create or update a data
export.
:type parameters: ~azure.mgmt.loganalytics.models.DataExport
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns DataExport or
ClientRawResponse<DataExport> if raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.loganalytics.models.DataExport]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.loganalytics.models.DataExport]]
:raises:
:class:`DataExportErrorResponseException<azure.mgmt.loganalytics.models.DataExportErrorResponseException>`
"""
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
data_export_name=data_export_name,
parameters=parameters,
custom_headers=custom_headers,
raw=True,
**operation_config
)

def get_long_running_output(response):
deserialized = self._deserialize('DataExport', response)

if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response

return deserialized

lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataExports/{dataExportName}'}

def get(
Expand Down Expand Up @@ -269,7 +233,7 @@ def get(
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)

if response.status_code not in [200, 404]:
if response.status_code not in [200]:
raise models.DataExportErrorResponseException(self._deserialize, response)

deserialized = None
Expand Down

0 comments on commit da7f0aa

Please sign in to comment.