From 70fb01ba77c98dc8950984ab9f509a792d9dc933 Mon Sep 17 00:00:00 2001 From: Azure SDK Bot <53356347+azure-sdk@users.noreply.github.com> Date: Mon, 4 Nov 2024 18:07:51 -0800 Subject: [PATCH] [AutoRelease] t2-loganalytics-2024-11-04-45063(can only be merged by SDK owner) (#38292) * code and test * update-testcase * fix generated samples * update version --------- Co-authored-by: azure-sdk Co-authored-by: ChenxiJiang333 --- .../azure-mgmt-loganalytics/CHANGELOG.md | 6 + .../azure-mgmt-loganalytics/README.md | 55 +- .../azure-mgmt-loganalytics/_meta.json | 10 +- .../azure-mgmt-loganalytics/assets.json | 6 - .../azure/mgmt/loganalytics/_configuration.py | 7 +- .../_log_analytics_management_client.py | 31 +- .../azure/mgmt/loganalytics/_patch.py | 1 + .../azure/mgmt/loganalytics/_serialization.py | 197 ++++--- .../azure/mgmt/loganalytics/_vendor.py | 27 - .../azure/mgmt/loganalytics/_version.py | 2 +- .../mgmt/loganalytics/aio/_configuration.py | 7 +- .../aio/_log_analytics_management_client.py | 33 +- .../azure/mgmt/loganalytics/aio/_patch.py | 1 + .../_available_service_tiers_operations.py | 38 +- .../aio/operations/_clusters_operations.py | 347 +++++------- .../operations/_data_exports_operations.py | 131 ++--- .../operations/_data_sources_operations.py | 131 ++--- .../_deleted_workspaces_operations.py | 70 +-- .../aio/operations/_gateways_operations.py | 34 +- .../_intelligence_packs_operations.py | 78 +-- .../operations/_linked_services_operations.py | 224 +++----- .../_linked_storage_accounts_operations.py | 129 ++--- .../_management_groups_operations.py | 43 +- .../_operation_statuses_operations.py | 38 +- .../aio/operations/_operations.py | 41 +- .../aio/operations/_queries_operations.py | 219 +++----- .../aio/operations/_query_packs_operations.py | 240 ++++----- .../operations/_saved_searches_operations.py | 126 ++--- .../aio/operations/_schema_operations.py | 38 +- .../aio/operations/_shared_keys_operations.py | 62 +-- .../_storage_insight_configs_operations.py | 129 ++--- .../aio/operations/_tables_operations.py | 353 +++++------- .../aio/operations/_usages_operations.py | 43 +- .../operations/_workspace_purge_operations.py | 84 ++- .../aio/operations/_workspaces_operations.py | 300 +++++------ .../mgmt/loganalytics/models/__init__.py | 6 + .../_log_analytics_management_client_enums.py | 91 ++-- .../mgmt/loganalytics/models/_models_py3.py | 508 +++++++++++------- .../_available_service_tiers_operations.py | 42 +- .../operations/_clusters_operations.py | 371 +++++-------- .../operations/_data_exports_operations.py | 147 ++--- .../operations/_data_sources_operations.py | 147 ++--- .../_deleted_workspaces_operations.py | 78 ++- .../operations/_gateways_operations.py | 38 +- .../_intelligence_packs_operations.py | 90 ++-- .../operations/_linked_services_operations.py | 240 ++++----- .../_linked_storage_accounts_operations.py | 145 ++--- .../_management_groups_operations.py | 47 +- .../_operation_statuses_operations.py | 42 +- .../loganalytics/operations/_operations.py | 43 +- .../operations/_queries_operations.py | 243 ++++----- .../operations/_query_packs_operations.py | 270 ++++------ .../operations/_saved_searches_operations.py | 142 ++--- .../operations/_schema_operations.py | 42 +- .../operations/_shared_keys_operations.py | 70 +-- .../_storage_insight_configs_operations.py | 145 ++--- .../operations/_tables_operations.py | 381 +++++-------- .../operations/_usages_operations.py | 47 +- .../operations/_workspace_purge_operations.py | 92 ++-- .../operations/_workspaces_operations.py | 
324 +++++------ .../dev_requirements.txt | 3 +- .../generated_samples/clusters_create.py | 5 +- .../generated_samples/clusters_delete.py | 8 +- .../generated_samples/clusters_get.py | 5 +- .../clusters_list_by_resource_group.py | 5 +- .../clusters_subscription_list.py | 5 +- .../generated_samples/clusters_update.py | 9 +- .../data_export_create_or_update.py | 1 + .../generated_samples/data_export_delete.py | 4 +- .../generated_samples/data_export_get.py | 1 + .../data_export_list_by_workspace.py | 1 + .../generated_samples/data_sources_create.py | 1 + .../generated_samples/data_sources_delete.py | 4 +- .../generated_samples/data_sources_get.py | 1 + .../data_sources_list_by_workspace.py | 1 + .../linked_services_create.py | 1 + .../linked_services_delete.py | 1 + .../generated_samples/linked_services_get.py | 1 + .../linked_services_list_by_workspace.py | 1 + .../linked_storage_accounts_create.py | 1 + .../linked_storage_accounts_delete.py | 4 +- .../linked_storage_accounts_get.py | 1 + ...nked_storage_accounts_list_by_workspace.py | 1 + .../operation_statuses_get.py | 1 + .../operations_list_by_tenant.py | 1 + .../query_pack_queries_delete.py | 4 +- .../query_pack_queries_get.py | 1 + .../query_pack_queries_list.py | 1 + .../generated_samples/query_packs_delete.py | 4 +- .../generated_samples/query_packs_get.py | 1 + .../generated_samples/query_packs_list.py | 1 + .../query_packs_list_by_resource_group.py | 1 + .../saved_searches_get_schema.py | 1 + .../saved_searches_list_by_workspace.py | 1 + .../storage_insights_create_or_update.py | 1 + .../storage_insights_delete.py | 4 +- .../generated_samples/storage_insights_get.py | 1 + .../storage_insights_list_by_workspace.py | 1 + .../generated_samples/tables_delete.py | 4 +- .../generated_samples/tables_get.py | 1 + .../generated_samples/tables_list.py | 1 + .../generated_samples/tables_migrate.py | 4 +- .../generated_samples/tables_search_cancel.py | 4 +- .../generated_samples/tables_upsert.py | 3 +- .../workspaces_available_service_tiers.py | 1 + .../generated_samples/workspaces_create.py | 1 + .../generated_samples/workspaces_delete.py | 4 +- .../workspaces_delete_saved_searches.py | 4 +- .../workspaces_disable_intelligence_pack.py | 4 +- .../workspaces_enable_intelligence_pack.py | 4 +- .../workspaces_gateways_delete.py | 4 +- .../generated_samples/workspaces_get.py | 1 + .../workspaces_get_shared_keys.py | 1 + .../workspaces_list_by_resource_group.py | 1 + .../workspaces_list_intelligence_packs.py | 1 + .../workspaces_list_management_groups.py | 1 + .../workspaces_list_usages.py | 1 + .../generated_samples/workspaces_purge.py | 1 + .../workspaces_purge_operation.py | 1 + .../workspaces_purge_resource_id.py | 1 + .../workspaces_regenerate_shared_keys.py | 1 + ...kspaces_saved_searches_create_or_update.py | 1 + .../workspaces_saved_searches_get.py | 1 + .../workspaces_subscription_list.py | 3 +- .../generated_samples/workspaces_update.py | 1 + .../generated_tests/conftest.py | 41 ++ ...ment_available_service_tiers_operations.py | 31 ++ ...vailable_service_tiers_operations_async.py | 32 ++ ...nalytics_management_clusters_operations.py | 126 +++++ ...cs_management_clusters_operations_async.py | 143 +++++ ...tics_management_data_exports_operations.py | 82 +++ ...anagement_data_exports_operations_async.py | 83 +++ ...tics_management_data_sources_operations.py | 80 +++ ...anagement_data_sources_operations_async.py | 81 +++ ...anagement_deleted_workspaces_operations.py | 40 ++ ...ent_deleted_workspaces_operations_async.py | 41 ++ 
...nalytics_management_gateways_operations.py | 32 ++ ...cs_management_gateways_operations_async.py | 33 ++ ...anagement_intelligence_packs_operations.py | 57 ++ ...ent_intelligence_packs_operations_async.py | 58 ++ ...s_management_linked_services_operations.py | 79 +++ ...gement_linked_services_operations_async.py | 84 +++ ...ment_linked_storage_accounts_operations.py | 77 +++ ...inked_storage_accounts_operations_async.py | 78 +++ ...management_management_groups_operations.py | 31 ++ ...ment_management_groups_operations_async.py | 32 ++ ...anagement_operation_statuses_operations.py | 31 ++ ...ent_operation_statuses_operations_async.py | 32 ++ ...est_log_analytics_management_operations.py | 29 + ...g_analytics_management_operations_async.py | 30 ++ ...analytics_management_queries_operations.py | 143 +++++ ...ics_management_queries_operations_async.py | 144 +++++ ...ytics_management_query_packs_operations.py | 122 +++++ ...management_query_packs_operations_async.py | 123 +++++ ...cs_management_saved_searches_operations.py | 83 +++ ...agement_saved_searches_operations_async.py | 84 +++ ..._analytics_management_schema_operations.py | 31 ++ ...tics_management_schema_operations_async.py | 32 ++ ...ytics_management_shared_keys_operations.py | 43 ++ ...management_shared_keys_operations_async.py | 44 ++ ...ment_storage_insight_configs_operations.py | 81 +++ ...torage_insight_configs_operations_async.py | 82 +++ ..._analytics_management_tables_operations.py | 249 +++++++++ ...tics_management_tables_operations_async.py | 256 +++++++++ ..._analytics_management_usages_operations.py | 31 ++ ...tics_management_usages_operations_async.py | 32 ++ ...s_management_workspace_purge_operations.py | 45 ++ ...gement_workspace_purge_operations_async.py | 46 ++ ...lytics_management_workspaces_operations.py | 161 ++++++ ..._management_workspaces_operations_async.py | 170 ++++++ .../sdk_packaging.toml | 2 +- .../azure-mgmt-loganalytics/setup.py | 88 +-- .../azure-mgmt-loganalytics/tests/conftest.py | 69 +-- ...eleted_workspaces_operations_async_test.py | 36 ++ ...ment_deleted_workspaces_operations_test.py | 35 ++ ...lytics_management_operations_async_test.py | 27 + ...og_analytics_management_operations_test.py | 26 + ...gement_workspaces_operations_async_test.py | 36 ++ ...s_management_workspaces_operations_test.py | 35 ++ .../tests/test_mgmt_loganalytics.py | 16 - ...st_workspace.py => test_workspace_test.py} | 21 +- 181 files changed, 6801 insertions(+), 4271 deletions(-) delete mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/assets.json delete mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_vendor.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/conftest.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_available_service_tiers_operations.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_available_service_tiers_operations_async.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_clusters_operations.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_clusters_operations_async.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_exports_operations.py create mode 100644 
sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_exports_operations_async.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_sources_operations.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_sources_operations_async.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_deleted_workspaces_operations.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_deleted_workspaces_operations_async.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_gateways_operations.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_gateways_operations_async.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_intelligence_packs_operations.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_intelligence_packs_operations_async.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_services_operations.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_services_operations_async.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_storage_accounts_operations.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_storage_accounts_operations_async.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_management_groups_operations.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_management_groups_operations_async.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operation_statuses_operations.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operation_statuses_operations_async.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operations.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operations_async.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_queries_operations.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_queries_operations_async.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_query_packs_operations.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_query_packs_operations_async.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_saved_searches_operations.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_saved_searches_operations_async.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_schema_operations.py create mode 
100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_schema_operations_async.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_shared_keys_operations.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_shared_keys_operations_async.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_storage_insight_configs_operations.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_storage_insight_configs_operations_async.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_tables_operations.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_tables_operations_async.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_usages_operations.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_usages_operations_async.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspace_purge_operations.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspace_purge_operations_async.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspaces_operations.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspaces_operations_async.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_deleted_workspaces_operations_async_test.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_deleted_workspaces_operations_test.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_operations_async_test.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_operations_test.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_workspaces_operations_async_test.py create mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_workspaces_operations_test.py delete mode 100644 sdk/loganalytics/azure-mgmt-loganalytics/tests/test_mgmt_loganalytics.py rename sdk/loganalytics/azure-mgmt-loganalytics/tests/{test_workspace.py => test_workspace_test.py} (50%) diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/CHANGELOG.md b/sdk/loganalytics/azure-mgmt-loganalytics/CHANGELOG.md index 3f57822fb6ef..9d1e2c8c0233 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/CHANGELOG.md +++ b/sdk/loganalytics/azure-mgmt-loganalytics/CHANGELOG.md @@ -1,5 +1,11 @@ # Release History +## 13.0.0b7 (2024-11-05) + +### Other Changes + + - Update dependencies + ## 13.0.0b6 (2022-12-12) ### Features Added diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/README.md b/sdk/loganalytics/azure-mgmt-loganalytics/README.md index 70b2c31225b1..a01be4617435 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/README.md +++ b/sdk/loganalytics/azure-mgmt-loganalytics/README.md @@ -1,28 +1,61 @@ # Microsoft Azure SDK for Python This is the Microsoft Azure Log Analytics Management Client Library. 
-This package has been tested with Python 3.7+. +This package has been tested with Python 3.8+. For a more complete view of Azure libraries, see the [azure sdk python release](https://aka.ms/azsdk/python/all). ## _Disclaimer_ _Azure SDK Python packages support for Python 2.7 has ended 01 January 2022. For more information and questions, please refer to https://github.com/Azure/azure-sdk-for-python/issues/20691_ -# Usage +## Getting started +### Prerequisites -To learn how to use this package, see the [quickstart guide](https://aka.ms/azsdk/python/mgmt) - -For docs and references, see [Python SDK References](https://docs.microsoft.com/python/api/overview/azure/) -Code samples for this package can be found at [Log Analytics Management](https://docs.microsoft.com/samples/browse/?languages=python&term=Getting%20started%20-%20Managing&terms=Getting%20started%20-%20Managing) on docs.microsoft.com. -Additional code samples for different Azure services are available at [Samples Repo](https://github.com/Azure-Samples/azure-samples-python-management/tree/main/samples/loganalytics) +- Python 3.8+ is required to use this package. +- [Azure subscription](https://azure.microsoft.com/free/) +### Install the package -# Provide Feedback +```bash +pip install azure-mgmt-loganalytics +pip install azure-identity +``` + +### Authentication + +By default, [Azure Active Directory](https://aka.ms/awps/aad) token authentication depends on correct configuration of the following environment variables. + +- `AZURE_CLIENT_ID` for the Azure client ID. +- `AZURE_TENANT_ID` for the Azure tenant ID. +- `AZURE_CLIENT_SECRET` for the Azure client secret. + +In addition, the Azure subscription ID can be configured via the environment variable `AZURE_SUBSCRIPTION_ID`. + +With the above configuration, the client can be authenticated with the following code: + +```python +from azure.identity import DefaultAzureCredential +from azure.mgmt.loganalytics import LogAnalyticsManagementClient +import os + +sub_id = os.getenv("AZURE_SUBSCRIPTION_ID") +client = LogAnalyticsManagementClient(credential=DefaultAzureCredential(), subscription_id=sub_id) +``` + +## Examples + +Code samples for this package can be found at: +- [Search Log Analytics Management](https://docs.microsoft.com/samples/browse/?languages=python&term=Getting%20started%20-%20Managing&terms=Getting%20started%20-%20Managing) on docs.microsoft.com +- [Azure Python Mgmt SDK Samples Repo](https://aka.ms/azsdk/python/mgmt/samples) + + +## Troubleshooting + +## Next steps + +## Provide Feedback If you encounter any bugs or have suggestions, please file an issue in the [Issues](https://github.com/Azure/azure-sdk-for-python/issues) section of the project.
- - -![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-python%2Fazure-mgmt-loganalytics%2FREADME.png) diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/_meta.json b/sdk/loganalytics/azure-mgmt-loganalytics/_meta.json index d9403e22f5b1..5edd9181339e 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/_meta.json +++ b/sdk/loganalytics/azure-mgmt-loganalytics/_meta.json @@ -1,11 +1,11 @@ { - "commit": "23b62d4e4dab07dccda851cfe50f6c6afb705a3b", + "commit": "690a7656d65a03b134fa44fc7bb8013dc18a15b5", "repository_url": "https://github.com/Azure/azure-rest-api-specs", - "autorest": "3.9.2", + "autorest": "3.10.2", "use": [ - "@autorest/python@6.2.7", - "@autorest/modelerfour@4.24.3" + "@autorest/python@6.19.0", + "@autorest/modelerfour@4.27.0" ], - "autorest_command": "autorest specification/operationalinsights/resource-manager/readme.md --generate-sample=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/home/vsts/work/1/azure-sdk-for-python/sdk --use=@autorest/python@6.2.7 --use=@autorest/modelerfour@4.24.3 --version=3.9.2 --version-tolerant=False", + "autorest_command": "autorest specification/operationalinsights/resource-manager/readme.md --generate-sample=True --generate-test=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/mnt/vss/_work/1/azure-sdk-for-python/sdk --tag=package-2022-10 --use=@autorest/python@6.19.0 --use=@autorest/modelerfour@4.27.0 --version=3.10.2 --version-tolerant=False", "readme": "specification/operationalinsights/resource-manager/readme.md" } \ No newline at end of file diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/assets.json b/sdk/loganalytics/azure-mgmt-loganalytics/assets.json deleted file mode 100644 index a9b4e6064c1a..000000000000 --- a/sdk/loganalytics/azure-mgmt-loganalytics/assets.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "AssetsRepo": "Azure/azure-sdk-assets", - "AssetsRepoPrefixPath": "python", - "TagPrefix": "python/loganalytics/azure-mgmt-loganalytics", - "Tag": "python/loganalytics/azure-mgmt-loganalytics_23ac9c2022" -} diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_configuration.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_configuration.py index ada997fdcad2..4adc03491fa7 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_configuration.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_configuration.py @@ -8,7 +8,6 @@ from typing import Any, TYPE_CHECKING -from azure.core.configuration import Configuration from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy @@ -19,7 +18,7 @@ from azure.core.credentials import TokenCredential -class LogAnalyticsManagementClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes +class LogAnalyticsManagementClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long """Configuration for LogAnalyticsManagementClient. 
Note that all parameters used to create this instance are saved as instance @@ -32,7 +31,6 @@ class LogAnalyticsManagementClientConfiguration(Configuration): # pylint: disab """ def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None: - super(LogAnalyticsManagementClientConfiguration, self).__init__(**kwargs) if credential is None: raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: @@ -42,6 +40,7 @@ def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs self.subscription_id = subscription_id self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) kwargs.setdefault("sdk_moniker", "mgmt-loganalytics/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) self._configure(**kwargs) def _configure(self, **kwargs: Any) -> None: @@ -50,9 +49,9 @@ def _configure(self, **kwargs: Any) -> None: self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) self.authentication_policy = kwargs.get("authentication_policy") if self.credential and not self.authentication_policy: self.authentication_policy = ARMChallengeAuthenticationPolicy( diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_log_analytics_management_client.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_log_analytics_management_client.py index 1ede1e222bb9..7b63f83d07d8 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_log_analytics_management_client.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_log_analytics_management_client.py @@ -8,9 +8,12 @@ from copy import deepcopy from typing import Any, TYPE_CHECKING +from typing_extensions import Self +from azure.core.pipeline import policies from azure.core.rest import HttpRequest, HttpResponse from azure.mgmt.core import ARMPipelineClient +from azure.mgmt.core.policies import ARMAutoResourceProviderRegistrationPolicy from . 
import models as _models from ._configuration import LogAnalyticsManagementClientConfiguration @@ -115,7 +118,25 @@ def __init__( self._config = LogAnalyticsManagementClientConfiguration( credential=credential, subscription_id=subscription_id, **kwargs ) - self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + ARMAutoResourceProviderRegistrationPolicy(), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: ARMPipelineClient = ARMPipelineClient(base_url=base_url, policies=_policies, **kwargs) client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) @@ -158,7 +179,7 @@ def __init__( ) self.tables = TablesOperations(self._client, self._config, self._serialize, self._deserialize) - def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse: + def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: """Runs the network request through the client's chained policies. >>> from azure.core.rest import HttpRequest @@ -178,14 +199,14 @@ def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse: request_copy = deepcopy(request) request_copy.url = self._client.format_url(request_copy.url) - return self._client.send_request(request_copy, **kwargs) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore def close(self) -> None: self._client.close() - def __enter__(self) -> "LogAnalyticsManagementClient": + def __enter__(self) -> Self: self._client.__enter__() return self - def __exit__(self, *exc_details) -> None: + def __exit__(self, *exc_details: Any) -> None: self._client.__exit__(*exc_details) diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_patch.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_patch.py index f99e77fef986..17dbc073e01b 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_patch.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_patch.py @@ -25,6 +25,7 @@ # # -------------------------------------------------------------------------- + # This file is used for handwritten extensions to the generated code. 
Example: # https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md def patch_sdk(): diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_serialization.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_serialization.py index 2c170e28dbca..8139854b97bb 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_serialization.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_serialization.py @@ -38,7 +38,22 @@ import re import sys import codecs -from typing import Optional, Union, AnyStr, IO, Mapping +from typing import ( + Dict, + Any, + cast, + Optional, + Union, + AnyStr, + IO, + Mapping, + Callable, + TypeVar, + MutableMapping, + Type, + List, + Mapping, +) try: from urllib import quote # type: ignore @@ -48,12 +63,14 @@ import isodate # type: ignore -from typing import Dict, Any, cast - -from azure.core.exceptions import DeserializationError, SerializationError, raise_with_traceback +from azure.core.exceptions import DeserializationError, SerializationError +from azure.core.serialization import NULL as CoreNull _BOM = codecs.BOM_UTF8.decode(encoding="utf-8") +ModelType = TypeVar("ModelType", bound="Model") +JSON = MutableMapping[str, Any] + class RawDeserializer: @@ -107,7 +124,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: pass return ET.fromstring(data_as_str) # nosec - except ET.ParseError: + except ET.ParseError as err: # It might be because the server has an issue, and returned JSON with # content-type XML.... # So let's try a JSON load, and if it's still broken @@ -126,7 +143,9 @@ def _json_attemp(data): # The function hack is because Py2.7 messes up with exception # context otherwise. 
_LOGGER.critical("Wasn't XML not JSON, failing") - raise_with_traceback(DeserializationError, "XML is invalid") + raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) @classmethod @@ -153,13 +172,6 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], return None -try: - basestring # type: ignore - unicode_str = unicode # type: ignore -except NameError: - basestring = str - unicode_str = str - _LOGGER = logging.getLogger(__name__) try: @@ -277,8 +289,8 @@ class Model(object): _attribute_map: Dict[str, Dict[str, Any]] = {} _validation: Dict[str, Dict[str, Any]] = {} - def __init__(self, **kwargs): - self.additional_properties = {} + def __init__(self, **kwargs: Any) -> None: + self.additional_properties: Optional[Dict[str, Any]] = {} for k in kwargs: if k not in self._attribute_map: _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) @@ -287,25 +299,25 @@ def __init__(self, **kwargs): else: setattr(self, k, kwargs[k]) - def __eq__(self, other): + def __eq__(self, other: Any) -> bool: """Compare objects by comparing all attributes.""" if isinstance(other, self.__class__): return self.__dict__ == other.__dict__ return False - def __ne__(self, other): + def __ne__(self, other: Any) -> bool: """Compare objects by comparing all attributes.""" return not self.__eq__(other) - def __str__(self): + def __str__(self) -> str: return str(self.__dict__) @classmethod - def enable_additional_properties_sending(cls): + def enable_additional_properties_sending(cls) -> None: cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"} @classmethod - def is_xml_model(cls): + def is_xml_model(cls) -> bool: try: cls._xml_map # type: ignore except AttributeError: @@ -322,8 +334,8 @@ def _create_xml_node(cls): return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) - def serialize(self, keep_readonly=False, **kwargs): - """Return the JSON that would be sent to azure from this model. + def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: + """Return the JSON that would be sent to server from this model. This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. @@ -334,10 +346,15 @@ def serialize(self, keep_readonly=False, **kwargs): :rtype: dict """ serializer = Serializer(self._infer_class_models()) - return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) + return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) # type: ignore - def as_dict(self, keep_readonly=True, key_transformer=attribute_transformer, **kwargs): - """Return a dict that can be JSONify using json.dump. + def as_dict( + self, + keep_readonly: bool = True, + key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer, + **kwargs: Any + ) -> JSON: + """Return a dict that can be serialized using json.dump. 
Advanced usage might optionally use a callback as parameter: @@ -368,7 +385,7 @@ def my_key_transformer(key, attr_desc, value): :rtype: dict """ serializer = Serializer(self._infer_class_models()) - return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) + return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) # type: ignore @classmethod def _infer_class_models(cls): @@ -384,7 +401,7 @@ def _infer_class_models(cls): return client_models @classmethod - def deserialize(cls, data, content_type=None): + def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = None) -> ModelType: """Parse a str using the RestAPI syntax and return a model. :param str data: A str using RestAPI structure. JSON by default. @@ -393,10 +410,15 @@ def deserialize(cls, data, content_type=None): :raises: DeserializationError if something went wrong """ deserializer = Deserializer(cls._infer_class_models()) - return deserializer(cls.__name__, data, content_type=content_type) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore @classmethod - def from_dict(cls, data, key_extractors=None, content_type=None): + def from_dict( + cls: Type[ModelType], + data: Any, + key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, + content_type: Optional[str] = None, + ) -> ModelType: """Parse a dict using given key extractor return a model. By default consider key @@ -409,8 +431,8 @@ def from_dict(cls, data, key_extractors=None, content_type=None): :raises: DeserializationError if something went wrong """ deserializer = Deserializer(cls._infer_class_models()) - deserializer.key_extractors = ( - [ + deserializer.key_extractors = ( # type: ignore + [ # type: ignore attribute_key_case_insensitive_extractor, rest_key_case_insensitive_extractor, last_rest_key_case_insensitive_extractor, @@ -418,7 +440,7 @@ def from_dict(cls, data, key_extractors=None, content_type=None): if key_extractors is None else key_extractors ) - return deserializer(cls.__name__, data, content_type=content_type) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore @classmethod def _flatten_subtype(cls, key, objects): @@ -518,7 +540,7 @@ class Serializer(object): "multiple": lambda x, y: x % y != 0, } - def __init__(self, classes=None): + def __init__(self, classes: Optional[Mapping[str, type]] = None): self.serialize_type = { "iso-8601": Serializer.serialize_iso, "rfc-1123": Serializer.serialize_rfc, @@ -534,7 +556,7 @@ def __init__(self, classes=None): "[]": self.serialize_iter, "{}": self.serialize_dict, } - self.dependencies = dict(classes) if classes else {} + self.dependencies: Dict[str, type] = dict(classes) if classes else {} self.key_transformer = full_restapi_key_transformer self.client_side_validation = True @@ -602,7 +624,7 @@ def _serialize(self, target_obj, data_type=None, **kwargs): if xml_desc.get("attr", False): if xml_ns: ET.register_namespace(xml_prefix, xml_ns) - xml_name = "{}{}".format(xml_ns, xml_name) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) serialized.set(xml_name, new_attr) # type: ignore continue if xml_desc.get("text", False): @@ -622,12 +644,11 @@ def _serialize(self, target_obj, data_type=None, **kwargs): else: # That's a basic type # Integrate namespace if necessary local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) - local_node.text = unicode_str(new_attr) + local_node.text = str(new_attr) 
serialized.append(local_node) # type: ignore else: # JSON for k in reversed(keys): # type: ignore - unflattened = {k: new_attr} - new_attr = unflattened + new_attr = {k: new_attr} _new_attr = new_attr _serialized = serialized @@ -636,12 +657,13 @@ def _serialize(self, target_obj, data_type=None, **kwargs): _serialized.update(_new_attr) # type: ignore _new_attr = _new_attr[k] # type: ignore _serialized = _serialized[k] - except ValueError: - continue + except ValueError as err: + if isinstance(err, SerializationError): + raise except (AttributeError, KeyError, TypeError) as err: msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) - raise_with_traceback(SerializationError, msg, err) + raise SerializationError(msg) from err else: return serialized @@ -656,8 +678,8 @@ def body(self, data, data_type, **kwargs): """ # Just in case this is a dict - internal_data_type = data_type.strip("[]{}") - internal_data_type = self.dependencies.get(internal_data_type, None) + internal_data_type_str = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type_str, None) try: is_xml_model_serialization = kwargs["is_xml"] except KeyError: @@ -683,7 +705,7 @@ def body(self, data, data_type, **kwargs): ] data = deserializer._deserialize(data_type, data) except DeserializationError as err: - raise_with_traceback(SerializationError, "Unable to build a model: " + str(err), err) + raise SerializationError("Unable to build a model: " + str(err)) from err return self._serialize(data, data_type, **kwargs) @@ -703,6 +725,7 @@ def url(self, name, data, data_type, **kwargs): if kwargs.get("skip_quote") is True: output = str(output) + output = output.replace("{", quote("{")).replace("}", quote("}")) else: output = quote(str(output), safe="") except SerializationError: @@ -715,7 +738,9 @@ def query(self, name, data, data_type, **kwargs): :param data: The data to be serialized. :param str data_type: The type to be serialized from. - :rtype: str + :keyword bool skip_quote: Whether to skip quote the serialized result. + Defaults to False. + :rtype: str, list :raises: TypeError if serialization fails. :raises: ValueError if data is None """ @@ -723,10 +748,8 @@ def query(self, name, data, data_type, **kwargs): # Treat the list aside, since we don't want to encode the div separator if data_type.startswith("["): internal_data_type = data_type[1:-1] - data = [self.serialize_data(d, internal_data_type, **kwargs) if d is not None else "" for d in data] - if not kwargs.get("skip_quote", False): - data = [quote(str(d), safe="") for d in data] - return str(self.serialize_iter(data, internal_data_type, **kwargs)) + do_quote = not kwargs.get("skip_quote", False) + return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) # Not a list, regular serialization output = self.serialize_data(data, data_type, **kwargs) @@ -777,6 +800,8 @@ def serialize_data(self, data, data_type, **kwargs): raise ValueError("No value for given attribute") try: + if data is CoreNull: + return None if data_type in self.basic_types.values(): return self.serialize_basic(data, data_type, **kwargs) @@ -795,7 +820,7 @@ def serialize_data(self, data, data_type, **kwargs): except (ValueError, TypeError) as err: msg = "Unable to serialize value: {!r} as type: {!r}." 
- raise_with_traceback(SerializationError, msg.format(data, data_type), err) + raise SerializationError(msg.format(data, data_type)) from err else: return self._serialize(data, **kwargs) @@ -863,6 +888,8 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs): not be None or empty. :param str div: If set, this str will be used to combine the elements in the iterable into a combined string. Default is 'None'. + :keyword bool do_quote: Whether to quote the serialized result of each iterable element. + Defaults to False. :rtype: list, str """ if isinstance(data, str): @@ -875,9 +902,14 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs): for d in data: try: serialized.append(self.serialize_data(d, iter_type, **kwargs)) - except ValueError: + except ValueError as err: + if isinstance(err, SerializationError): + raise serialized.append(None) + if kwargs.get("do_quote", False): + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] + if div: serialized = ["" if s is None else str(s) for s in serialized] serialized = div.join(serialized) @@ -922,7 +954,9 @@ def serialize_dict(self, attr, dict_type, **kwargs): for key, value in attr.items(): try: serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) - except ValueError: + except ValueError as err: + if isinstance(err, SerializationError): + raise serialized[self.serialize_unicode(key)] = None if "xml" in serialization_ctxt: @@ -955,7 +989,7 @@ def serialize_object(self, attr, **kwargs): return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) if obj_type is _long_type: return self.serialize_long(attr) - if obj_type is unicode_str: + if obj_type is str: return self.serialize_unicode(attr) if obj_type is datetime.datetime: return self.serialize_iso(attr) @@ -1132,10 +1166,10 @@ def serialize_iso(attr, **kwargs): return date + microseconds + "Z" except (ValueError, OverflowError) as err: msg = "Unable to serialize datetime object." - raise_with_traceback(SerializationError, msg, err) + raise SerializationError(msg) from err except AttributeError as err: msg = "ISO-8601 object must be valid Datetime object." - raise_with_traceback(TypeError, msg, err) + raise TypeError(msg) from err @staticmethod def serialize_unix(attr, **kwargs): @@ -1161,7 +1195,8 @@ def rest_key_extractor(attr, attr_desc, data): working_data = data while "." 
in key: - dict_keys = _FLATTEN.split(key) + # Need the cast, as for some reasons "split" is typed as list[str | Any] + dict_keys = cast(List[str], _FLATTEN.split(key)) if len(dict_keys) == 1: key = _decode_attribute_map_key(dict_keys[0]) break @@ -1170,7 +1205,6 @@ def rest_key_extractor(attr, attr_desc, data): if working_data is None: # If at any point while following flatten JSON path see None, it means # that all properties under are None as well - # https://github.com/Azure/msrest-for-python/issues/197 return None key = ".".join(dict_keys[1:]) @@ -1191,7 +1225,6 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data): if working_data is None: # If at any point while following flatten JSON path see None, it means # that all properties under are None as well - # https://github.com/Azure/msrest-for-python/issues/197 return None key = ".".join(dict_keys[1:]) @@ -1242,7 +1275,7 @@ def _extract_name_from_internal_type(internal_type): xml_name = internal_type_xml_map.get("name", internal_type.__name__) xml_ns = internal_type_xml_map.get("ns", None) if xml_ns: - xml_name = "{}{}".format(xml_ns, xml_name) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) return xml_name @@ -1266,7 +1299,7 @@ def xml_key_extractor(attr, attr_desc, data): # Integrate namespace if necessary xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) if xml_ns: - xml_name = "{}{}".format(xml_ns, xml_name) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) # If it's an attribute, that's simple if xml_desc.get("attr", False): @@ -1332,7 +1365,7 @@ class Deserializer(object): valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") - def __init__(self, classes=None): + def __init__(self, classes: Optional[Mapping[str, type]] = None): self.deserialize_type = { "iso-8601": Deserializer.deserialize_iso, "rfc-1123": Deserializer.deserialize_rfc, @@ -1352,7 +1385,7 @@ def __init__(self, classes=None): "duration": (isodate.Duration, datetime.timedelta), "iso-8601": (datetime.datetime), } - self.dependencies = dict(classes) if classes else {} + self.dependencies: Dict[str, type] = dict(classes) if classes else {} self.key_extractors = [rest_key_extractor, xml_key_extractor] # Additional properties only works if the "rest_key_extractor" is used to # extract the keys. Making it to work whatever the key extractor is too much @@ -1405,12 +1438,12 @@ def _deserialize(self, target_obj, data): response, class_name = self._classify_target(target_obj, data) - if isinstance(response, basestring): + if isinstance(response, str): return self.deserialize_data(data, response) elif isinstance(response, type) and issubclass(response, Enum): return self.deserialize_enum(data, response) - if data is None: + if data is None or data is CoreNull: return data try: attributes = response._attribute_map # type: ignore @@ -1442,7 +1475,7 @@ def _deserialize(self, target_obj, data): d_attrs[attr] = value except (AttributeError, TypeError, KeyError) as err: msg = "Unable to deserialize to object: " + class_name # type: ignore - raise_with_traceback(DeserializationError, msg, err) + raise DeserializationError(msg) from err else: additional_properties = self._build_additional_properties(attributes, data) return self._instantiate_model(response, d_attrs, additional_properties) @@ -1471,22 +1504,22 @@ def _classify_target(self, target, data): Once classification has been determined, initialize object. :param str target: The target object type to deserialize to. 
- :param str/dict data: The response data to deseralize. + :param str/dict data: The response data to deserialize. """ if target is None: return None, None - if isinstance(target, basestring): + if isinstance(target, str): try: target = self.dependencies[target] except KeyError: return target, target try: - target = target._classify(data, self.dependencies) + target = target._classify(data, self.dependencies) # type: ignore except AttributeError: pass # Target is not a Model, no classify - return target, target.__class__.__name__ + return target, target.__class__.__name__ # type: ignore def failsafe_deserialize(self, target_obj, data, content_type=None): """Ignores any errors encountered in deserialization, @@ -1496,7 +1529,7 @@ def failsafe_deserialize(self, target_obj, data, content_type=None): a deserialization error. :param str target_obj: The target object type to deserialize to. - :param str/dict data: The response data to deseralize. + :param str/dict data: The response data to deserialize. :param str content_type: Swagger "produces" if available. """ try: @@ -1539,7 +1572,7 @@ def _unpack_content(raw_data, content_type=None): if hasattr(raw_data, "_content_consumed"): return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) - if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, "read"): + if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore return raw_data @@ -1613,7 +1646,7 @@ def deserialize_data(self, data, data_type): except (ValueError, TypeError, AttributeError) as err: msg = "Unable to deserialize response data." msg += " Data: {}, {}".format(data, data_type) - raise_with_traceback(DeserializationError, msg, err) + raise DeserializationError(msg) from err else: return self._deserialize(obj_type, data) @@ -1661,7 +1694,7 @@ def deserialize_object(self, attr, **kwargs): if isinstance(attr, ET.Element): # Do no recurse on XML, just return the tree as-is return attr - if isinstance(attr, basestring): + if isinstance(attr, str): return self.deserialize_basic(attr, "str") obj_type = type(attr) if obj_type in self.basic_types: @@ -1718,7 +1751,7 @@ def deserialize_basic(self, attr, data_type): if data_type == "bool": if attr in [True, False, 1, 0]: return bool(attr) - elif isinstance(attr, basestring): + elif isinstance(attr, str): if attr.lower() in ["true", "1"]: return True elif attr.lower() in ["false", "0"]: @@ -1769,7 +1802,6 @@ def deserialize_enum(data, enum_obj): data = data.value if isinstance(data, int): # Workaround. We might consider remove it in the future. - # https://github.com/Azure/azure-rest-api-specs/issues/141 try: return list(enum_obj.__members__.values())[data] except IndexError: @@ -1823,10 +1855,10 @@ def deserialize_decimal(attr): if isinstance(attr, ET.Element): attr = attr.text try: - return decimal.Decimal(attr) # type: ignore + return decimal.Decimal(str(attr)) # type: ignore except decimal.DecimalException as err: msg = "Invalid decimal {}".format(attr) - raise_with_traceback(DeserializationError, msg, err) + raise DeserializationError(msg) from err @staticmethod def deserialize_long(attr): @@ -1854,7 +1886,7 @@ def deserialize_duration(attr): duration = isodate.parse_duration(attr) except (ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize duration object." 
- raise_with_traceback(DeserializationError, msg, err) + raise DeserializationError(msg) from err else: return duration @@ -1871,7 +1903,7 @@ def deserialize_date(attr): if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore raise DeserializationError("Date must have only digits and -. Received: %s" % attr) # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. - return isodate.parse_date(attr, defaultmonth=None, defaultday=None) + return isodate.parse_date(attr, defaultmonth=0, defaultday=0) @staticmethod def deserialize_time(attr): @@ -1906,7 +1938,7 @@ def deserialize_rfc(attr): date_obj = date_obj.astimezone(tz=TZ_UTC) except ValueError as err: msg = "Cannot deserialize to rfc datetime object." - raise_with_traceback(DeserializationError, msg, err) + raise DeserializationError(msg) from err else: return date_obj @@ -1943,7 +1975,7 @@ def deserialize_iso(attr): raise OverflowError("Hit max or min date") except (ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize datetime object." - raise_with_traceback(DeserializationError, msg, err) + raise DeserializationError(msg) from err else: return date_obj @@ -1959,9 +1991,10 @@ def deserialize_unix(attr): if isinstance(attr, ET.Element): attr = int(attr.text) # type: ignore try: + attr = int(attr) date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) except ValueError as err: msg = "Cannot deserialize to unix datetime object." - raise_with_traceback(DeserializationError, msg, err) + raise DeserializationError(msg) from err else: return date_obj diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_vendor.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_vendor.py deleted file mode 100644 index 9aad73fc743e..000000000000 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_vendor.py +++ /dev/null @@ -1,27 +0,0 @@ -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.core.pipeline.transport import HttpRequest - - -def _convert_request(request, files=None): - data = request.content if not files else None - request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) - if files: - request.set_formdata_body(files) - return request - - -def _format_url_section(template, **kwargs): - components = template.split("/") - while components: - try: - return template.format(**kwargs) - except KeyError as key: - formatted_components = template.split("/") - components = [c for c in formatted_components if "{}".format(key.args[0]) not in c] - template = "/".join(components) diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_version.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_version.py index a8e2bea671e9..e43de950c09c 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_version.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "13.0.0b6" +VERSION = "13.0.0b7" diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_configuration.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_configuration.py index 628ca59537b2..7d70ad5d4b55 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_configuration.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_configuration.py @@ -8,7 +8,6 @@ from typing import Any, TYPE_CHECKING -from azure.core.configuration import Configuration from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy @@ -19,7 +18,7 @@ from azure.core.credentials_async import AsyncTokenCredential -class LogAnalyticsManagementClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes +class LogAnalyticsManagementClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long """Configuration for LogAnalyticsManagementClient. Note that all parameters used to create this instance are saved as instance @@ -32,7 +31,6 @@ class LogAnalyticsManagementClientConfiguration(Configuration): # pylint: disab """ def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None: - super(LogAnalyticsManagementClientConfiguration, self).__init__(**kwargs) if credential is None: raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: @@ -42,6 +40,7 @@ def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **k self.subscription_id = subscription_id self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) kwargs.setdefault("sdk_moniker", "mgmt-loganalytics/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) self._configure(**kwargs) def _configure(self, **kwargs: Any) -> None: @@ -50,9 +49,9 @@ def _configure(self, **kwargs: Any) -> None: self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) self.authentication_policy = kwargs.get("authentication_policy") if self.credential and not self.authentication_policy: self.authentication_policy = AsyncARMChallengeAuthenticationPolicy( diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_log_analytics_management_client.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_log_analytics_management_client.py index cdfaa6945e11..12fcfbaab165 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_log_analytics_management_client.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_log_analytics_management_client.py @@ -8,9 +8,12 @@ from copy import deepcopy from typing import Any, Awaitable, TYPE_CHECKING +from typing_extensions import Self 
+from azure.core.pipeline import policies from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.mgmt.core import AsyncARMPipelineClient +from azure.mgmt.core.policies import AsyncARMAutoResourceProviderRegistrationPolicy from .. import models as _models from .._serialization import Deserializer, Serializer @@ -115,7 +118,25 @@ def __init__( self._config = LogAnalyticsManagementClientConfiguration( credential=credential, subscription_id=subscription_id, **kwargs ) - self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + AsyncARMAutoResourceProviderRegistrationPolicy(), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: AsyncARMPipelineClient = AsyncARMPipelineClient(base_url=base_url, policies=_policies, **kwargs) client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) @@ -158,7 +179,9 @@ def __init__( ) self.tables = TablesOperations(self._client, self._config, self._serialize, self._deserialize) - def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]: + def _send_request( + self, request: HttpRequest, *, stream: bool = False, **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: """Runs the network request through the client's chained policies. >>> from azure.core.rest import HttpRequest @@ -178,14 +201,14 @@ def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncH request_copy = deepcopy(request) request_copy.url = self._client.format_url(request_copy.url) - return self._client.send_request(request_copy, **kwargs) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore async def close(self) -> None: await self._client.close() - async def __aenter__(self) -> "LogAnalyticsManagementClient": + async def __aenter__(self) -> Self: await self._client.__aenter__() return self - async def __aexit__(self, *exc_details) -> None: + async def __aexit__(self, *exc_details: Any) -> None: await self._client.__aexit__(*exc_details) diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_patch.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_patch.py index f99e77fef986..17dbc073e01b 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_patch.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_patch.py @@ -25,6 +25,7 @@ # # -------------------------------------------------------------------------- + # This file is used for handwritten extensions to the generated code. 
Example: # https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md def patch_sdk(): diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_available_service_tiers_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_available_service_tiers_operations.py index a0b4887b85bb..92aec8106a28 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_available_service_tiers_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_available_service_tiers_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, List, Optional, TypeVar +from typing import Any, Callable, Dict, List, Optional, Type, TypeVar from azure.core.exceptions import ( ClientAuthenticationError, @@ -18,20 +18,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._available_service_tiers_operations import build_list_by_workspace_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -66,12 +64,11 @@ async def list_by_workspace( :type resource_group_name: str :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: list of AvailableServiceTier or the result of cls(response) :rtype: list[~azure.mgmt.loganalytics.models.AvailableServiceTier] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -82,23 +79,22 @@ async def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[List[_models.AvailableServiceTier]] = kwargs.pop("cls", None) - request = build_list_by_workspace_request( + _request = build_list_by_workspace_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_workspace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -107,13 +103,9 @@ async def list_by_workspace( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("[AvailableServiceTier]", pipeline_response) + deserialized = self._deserialize("[AvailableServiceTier]", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - list_by_workspace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/availableServiceTiers" - } + return deserialized # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_clusters_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_clusters_operations.py index 817c27022622..bcb536061b23 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_clusters_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_clusters_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
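# Illustrative sketch (resource names are placeholders): the regenerated async
# operations drop the old `.metadata["url"]` attributes and accept any per-call
# api_version override as a plain string.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with LogAnalyticsManagementClient(credential, "<subscription-id>") as client:
            tiers = await client.available_service_tiers.list_by_workspace(
                resource_group_name="my-rg",
                workspace_name="my-workspace",
            )
            for tier in tiers:
                print(tier.service_tier)


asyncio.run(main())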
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -16,12 +17,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -29,7 +31,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request from ...operations._clusters_operations import ( build_create_or_update_request, build_delete_request, @@ -39,10 +40,10 @@ build_update_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -73,7 +74,6 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy :param resource_group_name: The name of the resource group. The name is case insensitive. Required. 
:type resource_group_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Cluster or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.loganalytics.models.Cluster] :raises ~azure.core.exceptions.HttpResponseError: @@ -81,10 +81,10 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[_models.ClusterListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -95,23 +95,20 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy def prepare_request(next_link=None): if not next_link: - request = build_list_by_resource_group_request( + _request = build_list_by_resource_group_request( resource_group_name=resource_group_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_resource_group.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("ClusterListResult", pipeline_response) @@ -121,10 +118,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -137,15 +135,10 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list_by_resource_group.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters" - } - @distributed_trace def list(self, **kwargs: Any) -> AsyncIterable["_models.Cluster"]: """Gets the Log Analytics clusters in a subscription. 
- :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Cluster or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.loganalytics.models.Cluster] :raises ~azure.core.exceptions.HttpResponseError: @@ -153,10 +146,10 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.Cluster"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[_models.ClusterListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -167,22 +160,19 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.Cluster"]: def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("ClusterListResult", pipeline_response) @@ -192,10 +182,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -208,12 +199,10 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.OperationalInsights/clusters"} - async def _create_or_update_initial( - self, resource_group_name: str, cluster_name: str, parameters: Union[_models.Cluster, IO], **kwargs: Any - ) -> Optional[_models.Cluster]: - error_map = { + self, resource_group_name: str, cluster_name: str, parameters: Union[_models.Cluster, IO[bytes]], **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -224,19 +213,19 @@ async def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: 
ClsType[Optional[_models.Cluster]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "Cluster") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, cluster_name=cluster_name, subscription_id=self._config.subscription_id, @@ -244,39 +233,34 @@ async def _create_or_update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._create_or_update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 201, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("Cluster", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("Cluster", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - _create_or_update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}" - } + return deserialized # type: ignore @overload async def begin_create_or_update( @@ -301,14 +285,6 @@ async def begin_create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.loganalytics.models.Cluster] @@ -320,7 +296,7 @@ async def begin_create_or_update( self, resource_group_name: str, cluster_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -334,18 +310,10 @@ async def begin_create_or_update( :type cluster_name: str :param parameters: The parameters required to create or update a Log Analytics cluster. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.loganalytics.models.Cluster] @@ -354,7 +322,7 @@ async def begin_create_or_update( @distributed_trace_async async def begin_create_or_update( - self, resource_group_name: str, cluster_name: str, parameters: Union[_models.Cluster, IO], **kwargs: Any + self, resource_group_name: str, cluster_name: str, parameters: Union[_models.Cluster, IO[bytes]], **kwargs: Any ) -> AsyncLROPoller[_models.Cluster]: """Create or update a Log Analytics cluster. @@ -364,19 +332,8 @@ async def begin_create_or_update( :param cluster_name: The name of the Log Analytics cluster. Required. :type cluster_name: str :param parameters: The parameters required to create or update a Log Analytics cluster. Is - either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.loganalytics.models.Cluster or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + either a Cluster type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.loganalytics.models.Cluster or IO[bytes] :return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.loganalytics.models.Cluster] @@ -385,7 +342,7 @@ async def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -403,12 +360,13 @@ async def begin_create_or_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("Cluster", pipeline_response) + deserialized = self._deserialize("Cluster", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -418,22 +376,18 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[_models.Cluster].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}" - } + return AsyncLROPoller[_models.Cluster]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) - async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, cluster_name: str, **kwargs: Any - ) -> None: - error_map = { + async def _delete_initial(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -444,38 +398,42 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) - cls: ClsType[None] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, cluster_name=cluster_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = 
self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _delete_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}" - } + return deserialized # type: ignore @distributed_trace_async async def begin_delete(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> AsyncLROPoller[None]: @@ -486,14 +444,6 @@ async def begin_delete(self, resource_group_name: str, cluster_name: str, **kwar :type resource_group_name: str :param cluster_name: Name of the Log Analytics Cluster. Required. :type cluster_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -501,13 +451,13 @@ async def begin_delete(self, resource_group_name: str, cluster_name: str, **kwar _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( # type: ignore + raw_result = await self._delete_initial( resource_group_name=resource_group_name, cluster_name=cluster_name, api_version=api_version, @@ -516,11 +466,12 @@ async def begin_delete(self, resource_group_name: str, cluster_name: str, **kwar params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) @@ -529,17 +480,13 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}" - } + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore @distributed_trace_async async def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> _models.Cluster: @@ -550,12 +497,11 @@ async def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) :type resource_group_name: str :param cluster_name: Name of the Log Analytics Cluster. Required. 
:type cluster_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Cluster or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.Cluster :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -566,23 +512,22 @@ async def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, cluster_name=cluster_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -592,21 +537,21 @@ async def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("Cluster", pipeline_response) + deserialized = self._deserialize("Cluster", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}" - } + return deserialized # type: ignore async def _update_initial( - self, resource_group_name: str, cluster_name: str, parameters: Union[_models.ClusterPatch, IO], **kwargs: Any - ) -> _models.Cluster: - error_map = { + self, + resource_group_name: str, + cluster_name: str, + parameters: Union[_models.ClusterPatch, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -617,19 +562,19 @@ async def _update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, 
bytes)): _content = parameters else: _json = self._serialize.body(parameters, "ClusterPatch") - request = build_update_request( + _request = build_update_request( resource_group_name=resource_group_name, cluster_name=cluster_name, subscription_id=self._config.subscription_id, @@ -637,34 +582,34 @@ async def _update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("Cluster", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - _update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}" - } + return deserialized # type: ignore @overload async def begin_update( @@ -688,14 +633,6 @@ async def begin_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.loganalytics.models.Cluster] @@ -707,7 +644,7 @@ async def begin_update( self, resource_group_name: str, cluster_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -720,18 +657,10 @@ async def begin_update( :param cluster_name: Name of the Log Analytics Cluster. Required. :type cluster_name: str :param parameters: The parameters required to patch a Log Analytics cluster. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.loganalytics.models.Cluster] @@ -740,7 +669,11 @@ async def begin_update( @distributed_trace_async async def begin_update( - self, resource_group_name: str, cluster_name: str, parameters: Union[_models.ClusterPatch, IO], **kwargs: Any + self, + resource_group_name: str, + cluster_name: str, + parameters: Union[_models.ClusterPatch, IO[bytes]], + **kwargs: Any ) -> AsyncLROPoller[_models.Cluster]: """Updates a Log Analytics cluster. @@ -749,20 +682,9 @@ async def begin_update( :type resource_group_name: str :param cluster_name: Name of the Log Analytics Cluster. Required. :type cluster_name: str - :param parameters: The parameters required to patch a Log Analytics cluster. Is either a model - type or a IO type. Required. - :type parameters: ~azure.mgmt.loganalytics.models.ClusterPatch or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: The parameters required to patch a Log Analytics cluster. Is either a + ClusterPatch type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.loganalytics.models.ClusterPatch or IO[bytes] :return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.loganalytics.models.Cluster] @@ -771,7 +693,7 @@ async def begin_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -789,12 +711,13 @@ async def begin_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("Cluster", pipeline_response) + deserialized = self._deserialize("Cluster", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -804,14 +727,12 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[_models.Cluster].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}" - } + return AsyncLROPoller[_models.Cluster]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_data_exports_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_data_exports_operations.py index 636fc528b13e..12188cb7a4b2 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_data_exports_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_data_exports_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
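# Illustrative sketch (SKU and resource values are placeholder assumptions): the
# public begin_* methods still return AsyncLROPoller[Cluster] even though the
# *_initial helpers now stream the raw response internally.
from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient
from azure.mgmt.loganalytics.models import Cluster, ClusterSku


async def create_cluster(subscription_id: str) -> None:
    async with DefaultAzureCredential() as credential:
        async with LogAnalyticsManagementClient(credential, subscription_id) as client:
            poller = await client.clusters.begin_create_or_update(
                resource_group_name="my-rg",
                cluster_name="my-cluster",
                parameters=Cluster(
                    location="eastus",
                    sku=ClusterSku(capacity=500, name="CapacityReservation"),
                ),
            )
            cluster = await poller.result()  # resolves to a Cluster model
            print(cluster.id)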
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -19,15 +20,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._data_exports_operations import ( build_create_or_update_request, build_delete_request, @@ -35,10 +34,10 @@ build_list_by_workspace_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -73,7 +72,6 @@ def list_by_workspace( :type resource_group_name: str :param workspace_name: The name of the workspace. Required. :type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either DataExport or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.loganalytics.models.DataExport] :raises ~azure.core.exceptions.HttpResponseError: @@ -81,10 +79,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.DataExportListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -95,24 +93,21 @@ def list_by_workspace( def prepare_request(next_link=None): if not next_link: - request = build_list_by_workspace_request( + _request = build_list_by_workspace_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_workspace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + 
return _request async def extract_data(pipeline_response): deserialized = self._deserialize("DataExportListResult", pipeline_response) @@ -122,10 +117,11 @@ async def extract_data(pipeline_response): return None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -138,10 +134,6 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list_by_workspace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataExports" - } - @overload async def create_or_update( self, @@ -167,7 +159,6 @@ async def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DataExport or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.DataExport :raises ~azure.core.exceptions.HttpResponseError: @@ -179,7 +170,7 @@ async def create_or_update( resource_group_name: str, workspace_name: str, data_export_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -194,11 +185,10 @@ async def create_or_update( :param data_export_name: The data export rule name. Required. :type data_export_name: str :param parameters: The parameters required to create or update a data export. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DataExport or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.DataExport :raises ~azure.core.exceptions.HttpResponseError: @@ -210,7 +200,7 @@ async def create_or_update( resource_group_name: str, workspace_name: str, data_export_name: str, - parameters: Union[_models.DataExport, IO], + parameters: Union[_models.DataExport, IO[bytes]], **kwargs: Any ) -> _models.DataExport: """Create or update a data export. @@ -222,18 +212,14 @@ async def create_or_update( :type workspace_name: str :param data_export_name: The data export rule name. Required. :type data_export_name: str - :param parameters: The parameters required to create or update a data export. Is either a model - type or a IO type. Required. - :type parameters: ~azure.mgmt.loganalytics.models.DataExport or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: The parameters required to create or update a data export. Is either a + DataExport type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.loganalytics.models.DataExport or IO[bytes] :return: DataExport or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.DataExport :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -244,19 +230,19 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DataExport] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "DataExport") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, data_export_name=data_export_name, @@ -265,15 +251,14 @@ async def create_or_update( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -283,21 +268,13 @@ async def create_or_update( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("DataExport", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("DataExport", pipeline_response) + deserialized = self._deserialize("DataExport", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataExports/{dataExportName}" - } - @distributed_trace_async async def get( self, resource_group_name: str, workspace_name: str, data_export_name: str, **kwargs: Any @@ -311,12 +288,11 @@ async def get( :type workspace_name: str :param data_export_name: The data export rule name. Required. 
:type data_export_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DataExport or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.DataExport :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -327,24 +303,23 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.DataExport] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, workspace_name=workspace_name, data_export_name=data_export_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -354,16 +329,12 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("DataExport", pipeline_response) + deserialized = self._deserialize("DataExport", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataExports/{dataExportName}" - } + return deserialized # type: ignore @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements @@ -378,12 +349,11 @@ async def delete( # pylint: disable=inconsistent-return-statements :type workspace_name: str :param data_export_name: The data export rule name. Required. 
:type data_export_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -394,24 +364,23 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, data_export_name=data_export_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -422,8 +391,4 @@ async def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataExports/{dataExportName}" - } + return cls(pipeline_response, None, {}) # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_data_sources_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_data_sources_operations.py index 8253b7892bed..836f3487f420 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_data_sources_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_data_sources_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
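A minimal usage sketch for the regenerated async data export operations above (illustrative only, not part of the generated patch; the subscription ID, resource group, workspace, and export rule names are placeholders):

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient


async def show_data_export() -> None:
    # All names below are placeholders for illustration only.
    async with DefaultAzureCredential() as credential:
        async with LogAnalyticsManagementClient(credential, "<subscription-id>") as client:
            export = await client.data_exports.get(
                resource_group_name="my-rg",
                workspace_name="my-workspace",
                data_export_name="my-export",
            )
            print(export.name)


asyncio.run(show_data_export())

The calling convention is unchanged by this regeneration; only the request-building internals (no _convert_request, no *.metadata["url"]) differ.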
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -19,15 +20,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._data_sources_operations import ( build_create_or_update_request, build_delete_request, @@ -35,10 +34,10 @@ build_list_by_workspace_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -87,7 +86,6 @@ async def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DataSource or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.DataSource :raises ~azure.core.exceptions.HttpResponseError: @@ -99,7 +97,7 @@ async def create_or_update( resource_group_name: str, workspace_name: str, data_source_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -114,11 +112,10 @@ async def create_or_update( :param data_source_name: The name of the datasource resource. Required. :type data_source_name: str :param parameters: The parameters required to create or update a datasource. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DataSource or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.DataSource :raises ~azure.core.exceptions.HttpResponseError: @@ -130,7 +127,7 @@ async def create_or_update( resource_group_name: str, workspace_name: str, data_source_name: str, - parameters: Union[_models.DataSource, IO], + parameters: Union[_models.DataSource, IO[bytes]], **kwargs: Any ) -> _models.DataSource: """Create or update a data source. @@ -142,18 +139,14 @@ async def create_or_update( :type workspace_name: str :param data_source_name: The name of the datasource resource. Required. :type data_source_name: str - :param parameters: The parameters required to create or update a datasource. Is either a model - type or a IO type. 
Required. - :type parameters: ~azure.mgmt.loganalytics.models.DataSource or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: The parameters required to create or update a datasource. Is either a + DataSource type or a IO[bytes] type. Required. + :type parameters: ~azure.mgmt.loganalytics.models.DataSource or IO[bytes] :return: DataSource or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.DataSource :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -164,19 +157,19 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DataSource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "DataSource") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, data_source_name=data_source_name, @@ -185,15 +178,14 @@ async def create_or_update( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -202,21 +194,13 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("DataSource", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("DataSource", pipeline_response) + deserialized = self._deserialize("DataSource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataSources/{dataSourceName}" - } - @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, workspace_name: str, data_source_name: str, **kwargs: Any @@ -230,12 +214,11 @@ async def delete( # pylint: disable=inconsistent-return-statements :type workspace_name: str :param data_source_name: Name of 
the datasource. Required. :type data_source_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -246,24 +229,23 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, data_source_name=data_source_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -273,11 +255,7 @@ async def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataSources/{dataSourceName}" - } + return cls(pipeline_response, None, {}) # type: ignore @distributed_trace_async async def get( @@ -292,12 +270,11 @@ async def get( :type workspace_name: str :param data_source_name: Name of the datasource. Required. 
:type data_source_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DataSource or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.DataSource :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -308,24 +285,23 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.DataSource] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, workspace_name=workspace_name, data_source_name=data_source_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -334,16 +310,12 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DataSource", pipeline_response) + deserialized = self._deserialize("DataSource", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataSources/{dataSourceName}" - } + return deserialized # type: ignore @distributed_trace def list_by_workspace( @@ -361,7 +333,6 @@ def list_by_workspace( :param skiptoken: Starting point of the collection of data source instances. Default value is None. 
:type skiptoken: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either DataSource or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.loganalytics.models.DataSource] :raises ~azure.core.exceptions.HttpResponseError: @@ -369,10 +340,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.DataSourceListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -383,26 +354,23 @@ def list_by_workspace( def prepare_request(next_link=None): if not next_link: - request = build_list_by_workspace_request( + _request = build_list_by_workspace_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, filter=filter, skiptoken=skiptoken, api_version=api_version, - template_url=self.list_by_workspace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("DataSourceListResult", pipeline_response) @@ -412,10 +380,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -426,7 +395,3 @@ async def get_next(next_link=None): return pipeline_response return AsyncItemPaged(get_next, extract_data) - - list_by_workspace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataSources" - } diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_deleted_workspaces_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_deleted_workspaces_operations.py index bea0b060759a..483f6f97587a 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_deleted_workspaces_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_deleted_workspaces_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. 
All rights reserved. @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import sys -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, Optional, Type, TypeVar from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -19,20 +19,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._deleted_workspaces_operations import build_list_by_resource_group_request, build_list_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -60,7 +58,6 @@ def __init__(self, *args, **kwargs) -> None: def list(self, **kwargs: Any) -> AsyncIterable["_models.Workspace"]: """Gets recently deleted workspaces in a subscription, available for recovery. - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Workspace or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.loganalytics.models.Workspace] :raises ~azure.core.exceptions.HttpResponseError: @@ -68,10 +65,10 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.Workspace"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -82,22 +79,19 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.Workspace"]: def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def 
extract_data(pipeline_response): deserialized = self._deserialize("WorkspaceListResult", pipeline_response) @@ -107,10 +101,11 @@ async def extract_data(pipeline_response): return None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -123,8 +118,6 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.OperationalInsights/deletedWorkspaces"} - @distributed_trace def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> AsyncIterable["_models.Workspace"]: """Gets recently deleted workspaces in a resource group, available for recovery. @@ -132,7 +125,6 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Workspace or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.loganalytics.models.Workspace] :raises ~azure.core.exceptions.HttpResponseError: @@ -140,10 +132,10 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -154,23 +146,20 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy def prepare_request(next_link=None): if not next_link: - request = build_list_by_resource_group_request( + _request = build_list_by_resource_group_request( resource_group_name=resource_group_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_resource_group.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("WorkspaceListResult", pipeline_response) @@ -180,10 +169,11 @@ async def extract_data(pipeline_response): return None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # 
pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -195,7 +185,3 @@ async def get_next(next_link=None): return pipeline_response return AsyncItemPaged(get_next, extract_data) - - list_by_resource_group.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/deletedWorkspaces" - } diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_gateways_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_gateways_operations.py index fecbfb788bf3..67f55f918558 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_gateways_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_gateways_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, Optional, TypeVar +from typing import Any, Callable, Dict, Optional, Type, TypeVar from azure.core.exceptions import ( ClientAuthenticationError, @@ -18,20 +18,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._gateways_operations import build_delete_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -68,12 +66,11 @@ async def delete( # pylint: disable=inconsistent-return-statements :type workspace_name: str :param gateway_id: The Log Analytics gateway Id. Required. 
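Illustrative sketch of consuming the regenerated paged listing above (not part of the patch; subscription ID is a placeholder). AsyncItemPaged is consumed with "async for", and page requests are issued lazily:

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient


async def list_deleted_workspaces() -> None:
    async with DefaultAzureCredential() as credential:
        async with LogAnalyticsManagementClient(credential, "<subscription-id>") as client:
            # Iterating the pager triggers the underlying list requests one page at a time.
            async for workspace in client.deleted_workspaces.list():
                print(workspace.name)


asyncio.run(list_deleted_workspaces())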
:type gateway_id: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -84,24 +81,23 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, gateway_id=gateway_id, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -111,8 +107,4 @@ async def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/gateways/{gatewayId}" - } + return cls(pipeline_response, None, {}) # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_intelligence_packs_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_intelligence_packs_operations.py index 5b9f17587b26..adfc3b321813 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_intelligence_packs_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_intelligence_packs_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, List, Optional, TypeVar +from typing import Any, Callable, Dict, List, Optional, Type, TypeVar from azure.core.exceptions import ( ClientAuthenticationError, @@ -18,20 +18,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._intelligence_packs_operations import build_disable_request, build_enable_request, build_list_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -68,12 +66,11 @@ async def disable( # pylint: disable=inconsistent-return-statements :type workspace_name: str :param intelligence_pack_name: The name of the intelligence pack to be disabled. Required. :type intelligence_pack_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -84,24 +81,23 @@ async def disable( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_disable_request( + _request = build_disable_request( resource_group_name=resource_group_name, workspace_name=workspace_name, intelligence_pack_name=intelligence_pack_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.disable.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -111,11 +107,7 @@ async def disable( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - disable.metadata = { - "url": 
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/intelligencePacks/{intelligencePackName}/Disable" - } + return cls(pipeline_response, None, {}) # type: ignore @distributed_trace_async async def enable( # pylint: disable=inconsistent-return-statements @@ -130,12 +122,11 @@ async def enable( # pylint: disable=inconsistent-return-statements :type workspace_name: str :param intelligence_pack_name: The name of the intelligence pack to be enabled. Required. :type intelligence_pack_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -146,24 +137,23 @@ async def enable( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_enable_request( + _request = build_enable_request( resource_group_name=resource_group_name, workspace_name=workspace_name, intelligence_pack_name=intelligence_pack_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.enable.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -173,11 +163,7 @@ async def enable( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - enable.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/intelligencePacks/{intelligencePackName}/Enable" - } + return cls(pipeline_response, None, {}) # type: ignore @distributed_trace_async async def list( @@ -191,12 +177,11 @@ async def list( :type resource_group_name: str :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: list of IntelligencePack or the result of cls(response) :rtype: list[~azure.mgmt.loganalytics.models.IntelligencePack] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -207,23 +192,22 @@ async def list( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[List[_models.IntelligencePack]] = kwargs.pop("cls", None) - request = build_list_request( + _request = build_list_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -232,13 +216,9 @@ async def list( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("[IntelligencePack]", pipeline_response) + deserialized = self._deserialize("[IntelligencePack]", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - list.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/intelligencePacks" - } + return deserialized # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_linked_services_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_linked_services_operations.py index 6f1d3e48f2c6..fb464b3a9f0e 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_linked_services_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_linked_services_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -16,12 +17,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -29,7 +31,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request from ...operations._linked_services_operations import ( build_create_or_update_request, build_delete_request, @@ -37,10 +38,10 @@ build_list_by_workspace_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -69,10 +70,10 @@ async def _create_or_update_initial( resource_group_name: str, workspace_name: str, linked_service_name: str, - parameters: Union[_models.LinkedService, IO], + parameters: Union[_models.LinkedService, IO[bytes]], **kwargs: Any - ) -> Optional[_models.LinkedService]: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -83,19 +84,19 @@ async def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.LinkedService]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "LinkedService") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, linked_service_name=linked_service_name, @@ -104,38 +105,33 @@ async def _create_or_update_initial( content_type=content_type, json=_json, content=_content, - 
template_url=self._create_or_update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 201, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("LinkedService", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("LinkedService", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - _create_or_update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}" - } + return deserialized # type: ignore @overload async def begin_create_or_update( @@ -162,14 +158,6 @@ async def begin_create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either LinkedService or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.loganalytics.models.LinkedService] @@ -182,7 +170,7 @@ async def begin_create_or_update( resource_group_name: str, workspace_name: str, linked_service_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -197,18 +185,10 @@ async def begin_create_or_update( :param linked_service_name: Name of the linkedServices resource. Required. :type linked_service_name: str :param parameters: The parameters required to create or update a linked service. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either LinkedService or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.loganalytics.models.LinkedService] @@ -221,7 +201,7 @@ async def begin_create_or_update( resource_group_name: str, workspace_name: str, linked_service_name: str, - parameters: Union[_models.LinkedService, IO], + parameters: Union[_models.LinkedService, IO[bytes]], **kwargs: Any ) -> AsyncLROPoller[_models.LinkedService]: """Create or update a linked service. @@ -234,19 +214,8 @@ async def begin_create_or_update( :param linked_service_name: Name of the linkedServices resource. Required. :type linked_service_name: str :param parameters: The parameters required to create or update a linked service. Is either a - model type or a IO type. Required. - :type parameters: ~azure.mgmt.loganalytics.models.LinkedService or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + LinkedService type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.loganalytics.models.LinkedService or IO[bytes] :return: An instance of AsyncLROPoller that returns either LinkedService or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.loganalytics.models.LinkedService] @@ -255,7 +224,7 @@ async def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LinkedService] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -274,12 +243,13 @@ async def begin_create_or_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("LinkedService", pipeline_response) + deserialized = self._deserialize("LinkedService", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -289,22 +259,20 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[_models.LinkedService].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}" - } + return AsyncLROPoller[_models.LinkedService]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) async def _delete_initial( self, resource_group_name: str, workspace_name: str, linked_service_name: str, **kwargs: Any - ) -> Optional[_models.LinkedService]: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -315,44 +283,42 @@ async def _delete_initial( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) - cls: ClsType[Optional[_models.LinkedService]] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, linked_service_name=linked_service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = 
self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("LinkedService", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - _delete_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}" - } + return deserialized # type: ignore @distributed_trace_async async def begin_delete( @@ -367,14 +333,6 @@ async def begin_delete( :type workspace_name: str :param linked_service_name: Name of the linked service. Required. :type linked_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either LinkedService or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.loganalytics.models.LinkedService] @@ -383,7 +341,7 @@ async def begin_delete( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.LinkedService] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) @@ -399,12 +357,13 @@ async def begin_delete( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("LinkedService", pipeline_response) + deserialized = self._deserialize("LinkedService", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -414,17 +373,15 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[_models.LinkedService].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}" - } + return AsyncLROPoller[_models.LinkedService]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) @distributed_trace_async async def get( @@ -439,12 +396,11 @@ async def get( :type workspace_name: str :param linked_service_name: Name of the linked service. Required. 
:type linked_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LinkedService or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LinkedService :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -455,24 +411,23 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.LinkedService] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, workspace_name=workspace_name, linked_service_name=linked_service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -481,16 +436,12 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("LinkedService", pipeline_response) + deserialized = self._deserialize("LinkedService", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}" - } + return deserialized # type: ignore @distributed_trace def list_by_workspace( @@ -503,7 +454,6 @@ def list_by_workspace( :type resource_group_name: str :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either LinkedService or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.loganalytics.models.LinkedService] :raises ~azure.core.exceptions.HttpResponseError: @@ -511,10 +461,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.LinkedServiceListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -525,24 +475,21 @@ def list_by_workspace( def prepare_request(next_link=None): if not next_link: - request = build_list_by_workspace_request( + _request = build_list_by_workspace_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_workspace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("LinkedServiceListResult", pipeline_response) @@ -552,10 +499,11 @@ async def extract_data(pipeline_response): return None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -566,7 +514,3 @@ async def get_next(next_link=None): return pipeline_response return AsyncItemPaged(get_next, extract_data) - - list_by_workspace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices" - } diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_linked_storage_accounts_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_linked_storage_accounts_operations.py index 1534741cb736..528e9c15a8cc 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_linked_storage_accounts_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_linked_storage_accounts_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
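For orientation, a minimal usage sketch of the regenerated async LinkedServicesOperations diffed above (get, list_by_workspace, and begin_delete, whose poller is now typed as AsyncLROPoller[LinkedService]); the subscription, resource group, workspace, and linked service names are placeholders, and azure-identity is assumed to be installed alongside the SDK:

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient


async def main() -> None:
    credential = DefaultAzureCredential()
    client = LogAnalyticsManagementClient(credential, "<subscription-id>")
    async with client, credential:
        # Single GET, deserialized into a LinkedService model.
        linked = await client.linked_services.get("<resource-group>", "<workspace>", "<linked-service>")
        print(linked.name)

        # list_by_workspace returns an AsyncItemPaged, so iterate with `async for`.
        async for item in client.linked_services.list_by_workspace("<resource-group>", "<workspace>"):
            print(item.name)

        # Long-running delete; result() resolves to the deleted LinkedService.
        poller = await client.linked_services.begin_delete("<resource-group>", "<workspace>", "<linked-service>")
        deleted = await poller.result()
        print(deleted.name)


asyncio.run(main())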
@@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -19,15 +20,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._linked_storage_accounts_operations import ( build_create_or_update_request, build_delete_request, @@ -35,10 +34,10 @@ build_list_by_workspace_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -90,7 +89,6 @@ async def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LinkedStorageAccountsResource or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LinkedStorageAccountsResource :raises ~azure.core.exceptions.HttpResponseError: @@ -102,7 +100,7 @@ async def create_or_update( resource_group_name: str, workspace_name: str, data_source_type: Union[str, _models.DataSourceType], - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -120,11 +118,10 @@ async def create_or_update( :type data_source_type: str or ~azure.mgmt.loganalytics.models.DataSourceType :param parameters: The parameters required to create or update linked storage accounts. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LinkedStorageAccountsResource or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LinkedStorageAccountsResource :raises ~azure.core.exceptions.HttpResponseError: @@ -136,7 +133,7 @@ async def create_or_update( resource_group_name: str, workspace_name: str, data_source_type: Union[str, _models.DataSourceType], - parameters: Union[_models.LinkedStorageAccountsResource, IO], + parameters: Union[_models.LinkedStorageAccountsResource, IO[bytes]], **kwargs: Any ) -> _models.LinkedStorageAccountsResource: """Create or Update a link relation between current workspace and a group of storage accounts of a @@ -151,17 +148,13 @@ async def create_or_update( "AzureWatson", "Query", "Ingestion", and "Alerts". Required. :type data_source_type: str or ~azure.mgmt.loganalytics.models.DataSourceType :param parameters: The parameters required to create or update linked storage accounts. Is - either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.loganalytics.models.LinkedStorageAccountsResource or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + either a LinkedStorageAccountsResource type or a IO[bytes] type. Required. + :type parameters: ~azure.mgmt.loganalytics.models.LinkedStorageAccountsResource or IO[bytes] :return: LinkedStorageAccountsResource or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LinkedStorageAccountsResource :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -172,19 +165,19 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LinkedStorageAccountsResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "LinkedStorageAccountsResource") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, data_source_type=data_source_type, @@ -193,15 +186,14 @@ async def create_or_update( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -210,16 +202,12 @@ 
async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("LinkedStorageAccountsResource", pipeline_response) + deserialized = self._deserialize("LinkedStorageAccountsResource", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedStorageAccounts/{dataSourceType}" - } + return deserialized # type: ignore @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements @@ -240,12 +228,11 @@ async def delete( # pylint: disable=inconsistent-return-statements :param data_source_type: Linked storage accounts type. Known values are: "CustomLogs", "AzureWatson", "Query", "Ingestion", and "Alerts". Required. :type data_source_type: str or ~azure.mgmt.loganalytics.models.DataSourceType - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -256,24 +243,23 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, data_source_type=data_source_type, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -283,11 +269,7 @@ async def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedStorageAccounts/{dataSourceType}" - } + return cls(pipeline_response, None, {}) # type: ignore @distributed_trace_async async def get( @@ -308,12 +290,11 @@ async def get( :param data_source_type: Linked storage accounts type. Known values are: "CustomLogs", "AzureWatson", "Query", "Ingestion", and "Alerts". Required. 
:type data_source_type: str or ~azure.mgmt.loganalytics.models.DataSourceType - :keyword callable cls: A custom type or function that will be passed the direct response :return: LinkedStorageAccountsResource or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LinkedStorageAccountsResource :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -324,24 +305,23 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.LinkedStorageAccountsResource] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, workspace_name=workspace_name, data_source_type=data_source_type, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -350,16 +330,12 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("LinkedStorageAccountsResource", pipeline_response) + deserialized = self._deserialize("LinkedStorageAccountsResource", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedStorageAccounts/{dataSourceType}" - } + return deserialized # type: ignore @distributed_trace def list_by_workspace( @@ -373,7 +349,6 @@ def list_by_workspace( :type resource_group_name: str :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either LinkedStorageAccountsResource or the result of cls(response) :rtype: @@ -383,10 +358,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.LinkedStorageAccountsListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -397,24 +372,21 @@ def list_by_workspace( def prepare_request(next_link=None): if not next_link: - request = build_list_by_workspace_request( + _request = build_list_by_workspace_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_workspace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("LinkedStorageAccountsListResult", pipeline_response) @@ -424,10 +396,11 @@ async def extract_data(pipeline_response): return None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -438,7 +411,3 @@ async def get_next(next_link=None): return pipeline_response return AsyncItemPaged(get_next, extract_data) - - list_by_workspace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedStorageAccounts" - } diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_management_groups_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_management_groups_operations.py index bb1c4a01a7bb..f182f16eb10e 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_management_groups_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_management_groups_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
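A sketch of create_or_update on the async LinkedStorageAccountsOperations changed above, which accepts either a LinkedStorageAccountsResource model or an IO[bytes] body (the IOBase check in the diff); the resource names and storage account resource ID are placeholders, and the storage_account_ids field is assumed from the current models:

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient
from azure.mgmt.loganalytics.models import LinkedStorageAccountsResource


async def main() -> None:
    credential = DefaultAzureCredential()
    client = LogAnalyticsManagementClient(credential, "<subscription-id>")
    async with client, credential:
        body = LinkedStorageAccountsResource(storage_account_ids=["<storage-account-resource-id>"])
        resource = await client.linked_storage_accounts.create_or_update(
            "<resource-group>", "<workspace>", "CustomLogs", body  # "CustomLogs" is one documented DataSourceType value
        )
        print(resource.storage_account_ids)
        # A pre-serialized JSON stream also works, since the operation accepts IO[bytes],
        # e.g. parameters=open("linked_storage_accounts.json", "rb").


asyncio.run(main())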
# -------------------------------------------------------------------------- import sys -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, Optional, Type, TypeVar from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -19,20 +19,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._management_groups_operations import build_list_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -67,7 +65,6 @@ def list( :type resource_group_name: str :param workspace_name: The name of the workspace. Required. :type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ManagementGroup or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.loganalytics.models.ManagementGroup] @@ -76,10 +73,10 @@ def list( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.WorkspaceListManagementGroupsResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -90,24 +87,21 @@ def list( def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("WorkspaceListManagementGroupsResult", pipeline_response) @@ -117,10 +111,11 @@ async def extract_data(pipeline_response): return None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) 
+ _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -131,7 +126,3 @@ async def get_next(next_link=None): return pipeline_response return AsyncItemPaged(get_next, extract_data) - - list.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/managementGroups" - } diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_operation_statuses_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_operation_statuses_operations.py index b11f236b079c..b19817fea735 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_operation_statuses_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_operation_statuses_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, Optional, TypeVar +from typing import Any, Callable, Dict, Optional, Type, TypeVar from azure.core.exceptions import ( ClientAuthenticationError, @@ -18,20 +18,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._operation_statuses_operations import build_get_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -63,12 +61,11 @@ async def get(self, location: str, async_operation_id: str, **kwargs: Any) -> _m :type location: str :param async_operation_id: The operation Id. Required. 
:type async_operation_id: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: OperationStatus or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.OperationStatus :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -79,23 +76,22 @@ async def get(self, location: str, async_operation_id: str, **kwargs: Any) -> _m _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.OperationStatus] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( location=location, async_operation_id=async_operation_id, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -104,13 +100,9 @@ async def get(self, location: str, async_operation_id: str, **kwargs: Any) -> _m map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("OperationStatus", pipeline_response) + deserialized = self._deserialize("OperationStatus", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - get.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.OperationalInsights/locations/{location}/operationStatuses/{asyncOperationId}" - } + return deserialized # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_operations.py index aa3364ac61d4..18f251fe07b9 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
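A small sketch of the async operation_statuses.get call regenerated above, with the location, operation id, and subscription as placeholders:

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient


async def main() -> None:
    credential = DefaultAzureCredential()
    client = LogAnalyticsManagementClient(credential, "<subscription-id>")
    async with client, credential:
        status = await client.operation_statuses.get("<location>", "<async-operation-id>")
        print(status.name, status.status)


asyncio.run(main())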
# -------------------------------------------------------------------------- import sys -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, Optional, Type, TypeVar from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -19,20 +19,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._operations import build_list_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -60,7 +58,6 @@ def __init__(self, *args, **kwargs) -> None: def list(self, **kwargs: Any) -> AsyncIterable["_models.Operation"]: """Lists all of the available OperationalInsights Rest API operations. - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Operation or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.loganalytics.models.Operation] :raises ~azure.core.exceptions.HttpResponseError: @@ -68,10 +65,10 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.Operation"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -82,21 +79,18 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.Operation"]: def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("OperationListResult", pipeline_response) @@ -106,10 +100,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) 
async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -121,5 +116,3 @@ async def get_next(next_link=None): return pipeline_response return AsyncItemPaged(get_next, extract_data) - - list.metadata = {"url": "/providers/Microsoft.OperationalInsights/operations"} diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_queries_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_queries_operations.py index 90b8fad0fa6a..ee4c05d9271e 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_queries_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_queries_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -19,15 +20,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._queries_operations import ( build_delete_request, build_get_request, @@ -37,10 +36,10 @@ build_update_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -89,7 +88,6 @@ def list( :param skip_token: Base64 encoded token used to fetch the next page of items. Default is null. Default value is None. 
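The operations.list call diffed above takes no resource scoping at all; a sketch, with only the subscription as a placeholder:

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient


async def main() -> None:
    credential = DefaultAzureCredential()
    client = LogAnalyticsManagementClient(credential, "<subscription-id>")
    async with client, credential:
        async for op in client.operations.list():
            print(op.name)


asyncio.run(main())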
:type skip_token: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either LogAnalyticsQueryPackQuery or the result of cls(response) :rtype: @@ -99,10 +97,10 @@ def list( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) cls: ClsType[_models.LogAnalyticsQueryPackQueryListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -113,7 +111,7 @@ def list( def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( resource_group_name=resource_group_name, query_pack_name=query_pack_name, subscription_id=self._config.subscription_id, @@ -121,19 +119,16 @@ def prepare_request(next_link=None): include_body=include_body, skip_token=skip_token, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("LogAnalyticsQueryPackQueryListResult", pipeline_response) @@ -143,10 +138,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -159,10 +155,6 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}/queries" - } - @overload def search( self, @@ -199,7 +191,6 @@ def search( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either LogAnalyticsQueryPackQuery or the result of cls(response) :rtype: @@ -212,7 +203,7 @@ def search( self, resource_group_name: str, query_pack_name: str, - query_search_properties: IO, + query_search_properties: IO[bytes], top: Optional[int] = None, include_body: Optional[bool] = None, skip_token: Optional[str] = None, @@ -230,7 +221,7 @@ def search( :type query_pack_name: str :param query_search_properties: Properties by which to search queries in the given Log Analytics QueryPack. Required. 
- :type query_search_properties: IO + :type query_search_properties: IO[bytes] :param top: Maximum items returned in page. Default value is None. :type top: int :param include_body: Flag indicating whether or not to return the body of each applicable @@ -242,7 +233,6 @@ def search( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either LogAnalyticsQueryPackQuery or the result of cls(response) :rtype: @@ -255,7 +245,7 @@ def search( self, resource_group_name: str, query_pack_name: str, - query_search_properties: Union[_models.LogAnalyticsQueryPackQuerySearchProperties, IO], + query_search_properties: Union[_models.LogAnalyticsQueryPackQuerySearchProperties, IO[bytes]], top: Optional[int] = None, include_body: Optional[bool] = None, skip_token: Optional[str] = None, @@ -270,9 +260,10 @@ def search( :param query_pack_name: The name of the Log Analytics QueryPack resource. Required. :type query_pack_name: str :param query_search_properties: Properties by which to search queries in the given Log - Analytics QueryPack. Is either a model type or a IO type. Required. + Analytics QueryPack. Is either a LogAnalyticsQueryPackQuerySearchProperties type or a IO[bytes] + type. Required. :type query_search_properties: - ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuerySearchProperties or IO + ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuerySearchProperties or IO[bytes] :param top: Maximum items returned in page. Default value is None. :type top: int :param include_body: Flag indicating whether or not to return the body of each applicable @@ -281,10 +272,6 @@ def search( :param skip_token: Base64 encoded token used to fetch the next page of items. Default is null. Default value is None. :type skip_token: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either LogAnalyticsQueryPackQuery or the result of cls(response) :rtype: @@ -294,11 +281,11 @@ def search( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPackQueryListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -308,7 +295,7 @@ def search( content_type = content_type or "application/json" _json = None _content = None - if isinstance(query_search_properties, (IO, bytes)): + if isinstance(query_search_properties, (IOBase, bytes)): _content = query_search_properties else: _json = self._serialize.body(query_search_properties, "LogAnalyticsQueryPackQuerySearchProperties") @@ -316,7 +303,7 @@ def search( def prepare_request(next_link=None): if not next_link: - request = build_search_request( + _request = build_search_request( resource_group_name=resource_group_name, query_pack_name=query_pack_name, subscription_id=self._config.subscription_id, @@ -327,19 +314,16 @@ def prepare_request(next_link=None): content_type=content_type, json=_json, content=_content, - template_url=self.search.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("LogAnalyticsQueryPackQueryListResult", pipeline_response) @@ -349,10 +333,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -365,10 +350,6 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - search.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}/queries/search" - } - @distributed_trace_async async def get( self, resource_group_name: str, query_pack_name: str, id: str, **kwargs: Any @@ -382,12 +363,11 @@ async def get( :type query_pack_name: str :param id: The id of a specific query defined in the Log Analytics QueryPack. Required. 
:type id: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPackQuery or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -398,24 +378,23 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) cls: ClsType[_models.LogAnalyticsQueryPackQuery] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, query_pack_name=query_pack_name, id=id, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -425,16 +404,12 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("LogAnalyticsQueryPackQuery", pipeline_response) + deserialized = self._deserialize("LogAnalyticsQueryPackQuery", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}/queries/{id}" - } + return deserialized # type: ignore @overload async def put( @@ -462,7 +437,6 @@ async def put( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPackQuery or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery :raises ~azure.core.exceptions.HttpResponseError: @@ -474,7 +448,7 @@ async def put( resource_group_name: str, query_pack_name: str, id: str, - query_payload: IO, + query_payload: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -490,11 +464,10 @@ async def put( :type id: str :param query_payload: Properties that need to be specified to create a new query and add it to a Log Analytics QueryPack. Required. - :type query_payload: IO + :type query_payload: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPackQuery or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery :raises ~azure.core.exceptions.HttpResponseError: @@ -506,7 +479,7 @@ async def put( resource_group_name: str, query_pack_name: str, id: str, - query_payload: Union[_models.LogAnalyticsQueryPackQuery, IO], + query_payload: Union[_models.LogAnalyticsQueryPackQuery, IO[bytes]], **kwargs: Any ) -> _models.LogAnalyticsQueryPackQuery: """Adds or Updates a specific Query within a Log Analytics QueryPack. @@ -519,17 +492,14 @@ async def put( :param id: The id of a specific query defined in the Log Analytics QueryPack. Required. :type id: str :param query_payload: Properties that need to be specified to create a new query and add it to - a Log Analytics QueryPack. Is either a model type or a IO type. Required. - :type query_payload: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + a Log Analytics QueryPack. Is either a LogAnalyticsQueryPackQuery type or a IO[bytes] type. + Required. + :type query_payload: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery or IO[bytes] :return: LogAnalyticsQueryPackQuery or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -540,19 +510,19 @@ async def put( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPackQuery] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(query_payload, (IO, bytes)): + if isinstance(query_payload, (IOBase, bytes)): _content = query_payload else: _json = self._serialize.body(query_payload, "LogAnalyticsQueryPackQuery") - request = build_put_request( + _request = build_put_request( resource_group_name=resource_group_name, query_pack_name=query_pack_name, id=id, @@ -561,15 +531,14 @@ async def put( content_type=content_type, json=_json, content=_content, - template_url=self.put.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -579,16 +548,12 @@ async def put( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, 
error_format=ARMErrorFormat) - deserialized = self._deserialize("LogAnalyticsQueryPackQuery", pipeline_response) + deserialized = self._deserialize("LogAnalyticsQueryPackQuery", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - put.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}/queries/{id}" - } + return deserialized # type: ignore @overload async def update( @@ -616,7 +581,6 @@ async def update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPackQuery or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery :raises ~azure.core.exceptions.HttpResponseError: @@ -628,7 +592,7 @@ async def update( resource_group_name: str, query_pack_name: str, id: str, - query_payload: IO, + query_payload: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -644,11 +608,10 @@ async def update( :type id: str :param query_payload: Properties that need to be specified to create a new query and add it to a Log Analytics QueryPack. Required. - :type query_payload: IO + :type query_payload: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPackQuery or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery :raises ~azure.core.exceptions.HttpResponseError: @@ -660,7 +623,7 @@ async def update( resource_group_name: str, query_pack_name: str, id: str, - query_payload: Union[_models.LogAnalyticsQueryPackQuery, IO], + query_payload: Union[_models.LogAnalyticsQueryPackQuery, IO[bytes]], **kwargs: Any ) -> _models.LogAnalyticsQueryPackQuery: """Adds or Updates a specific Query within a Log Analytics QueryPack. @@ -673,17 +636,14 @@ async def update( :param id: The id of a specific query defined in the Log Analytics QueryPack. Required. :type id: str :param query_payload: Properties that need to be specified to create a new query and add it to - a Log Analytics QueryPack. Is either a model type or a IO type. Required. - :type query_payload: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + a Log Analytics QueryPack. Is either a LogAnalyticsQueryPackQuery type or a IO[bytes] type. + Required. 
+ :type query_payload: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery or IO[bytes] :return: LogAnalyticsQueryPackQuery or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -694,19 +654,19 @@ async def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPackQuery] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(query_payload, (IO, bytes)): + if isinstance(query_payload, (IOBase, bytes)): _content = query_payload else: _json = self._serialize.body(query_payload, "LogAnalyticsQueryPackQuery") - request = build_update_request( + _request = build_update_request( resource_group_name=resource_group_name, query_pack_name=query_pack_name, id=id, @@ -715,15 +675,14 @@ async def update( content_type=content_type, json=_json, content=_content, - template_url=self.update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -733,16 +692,12 @@ async def update( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("LogAnalyticsQueryPackQuery", pipeline_response) + deserialized = self._deserialize("LogAnalyticsQueryPackQuery", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}/queries/{id}" - } + return deserialized # type: ignore @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements @@ -757,12 +712,11 @@ async def delete( # pylint: disable=inconsistent-return-statements :type query_pack_name: str :param id: The id of a specific query defined in the Log Analytics QueryPack. Required. 
:type id: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -773,24 +727,23 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, query_pack_name=query_pack_name, id=id, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -801,8 +754,4 @@ async def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}/queries/{id}" - } + return cls(pipeline_response, None, {}) # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_query_packs_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_query_packs_operations.py index 386b87d4dad1..67fa93e86b11 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_query_packs_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_query_packs_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
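A note on the QueriesOperations hunks above: `put` and `update` now accept `Union[LogAnalyticsQueryPackQuery, IO[bytes]]` and dispatch on `isinstance(query_payload, (IOBase, bytes))`, so a caller can pass either a typed model or a raw byte stream as the body. The sketch below is illustrative only and not part of the patch; the client, resource names, query id, and model keyword arguments are assumptions.

# Illustrative caller-side sketch -- not part of the generated patch. Assumes an
# already constructed azure.mgmt.loganalytics.aio.LogAnalyticsManagementClient.
import io
import json

from azure.mgmt.loganalytics.models import LogAnalyticsQueryPackQuery


async def put_query_both_ways(client, resource_group: str, pack_name: str, query_id: str) -> None:
    # Form 1: a typed model, serialized by the operation into the JSON body.
    model_payload = LogAnalyticsQueryPackQuery(
        display_name="Top errors",                       # assumed model fields
        body="AzureActivity | where Level == 'Error'",
    )
    await client.queries.put(
        resource_group_name=resource_group,
        query_pack_name=pack_name,
        id=query_id,
        query_payload=model_payload,
    )

    # Form 2: an IO[bytes] stream, passed through unchanged as the request content.
    raw_payload = io.BytesIO(
        json.dumps(
            {"properties": {"displayName": "Top errors", "body": "AzureActivity | take 10"}}
        ).encode("utf-8")
    )
    await client.queries.put(
        resource_group_name=resource_group,
        query_pack_name=pack_name,
        id=query_id,
        query_payload=raw_payload,
        content_type="application/json",
    )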
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -19,15 +20,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._query_packs_operations import ( build_create_or_update_request, build_create_or_update_without_name_request, @@ -38,10 +37,10 @@ build_update_tags_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -69,7 +68,6 @@ def __init__(self, *args, **kwargs) -> None: def list(self, **kwargs: Any) -> AsyncIterable["_models.LogAnalyticsQueryPack"]: """Gets a list of all Log Analytics QueryPacks within a subscription. 
- :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either LogAnalyticsQueryPack or the result of cls(response) :rtype: @@ -79,10 +77,10 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.LogAnalyticsQueryPack"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) cls: ClsType[_models.LogAnalyticsQueryPackListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -93,22 +91,19 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.LogAnalyticsQueryPack"]: def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("LogAnalyticsQueryPackListResult", pipeline_response) @@ -118,10 +113,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -134,8 +130,6 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.OperationalInsights/queryPacks"} - @distributed_trace def list_by_resource_group( self, resource_group_name: str, **kwargs: Any @@ -145,7 +139,6 @@ def list_by_resource_group( :param resource_group_name: The name of the resource group. The name is case insensitive. Required. 
:type resource_group_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either LogAnalyticsQueryPack or the result of cls(response) :rtype: @@ -155,10 +148,10 @@ def list_by_resource_group( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) cls: ClsType[_models.LogAnalyticsQueryPackListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -169,23 +162,20 @@ def list_by_resource_group( def prepare_request(next_link=None): if not next_link: - request = build_list_by_resource_group_request( + _request = build_list_by_resource_group_request( resource_group_name=resource_group_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_resource_group.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("LogAnalyticsQueryPackListResult", pipeline_response) @@ -195,10 +185,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -211,10 +202,6 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list_by_resource_group.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks" - } - @overload async def create_or_update_without_name( self, @@ -236,7 +223,6 @@ async def create_or_update_without_name( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPack or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: @@ -246,7 +232,7 @@ async def create_or_update_without_name( async def create_or_update_without_name( self, resource_group_name: str, - log_analytics_query_pack_payload: IO, + log_analytics_query_pack_payload: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -259,11 +245,10 @@ async def create_or_update_without_name( :type resource_group_name: str :param log_analytics_query_pack_payload: Properties that need to be specified to create or update a Log Analytics QueryPack. Required. - :type log_analytics_query_pack_payload: IO + :type log_analytics_query_pack_payload: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPack or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: @@ -273,7 +258,7 @@ async def create_or_update_without_name( async def create_or_update_without_name( self, resource_group_name: str, - log_analytics_query_pack_payload: Union[_models.LogAnalyticsQueryPack, IO], + log_analytics_query_pack_payload: Union[_models.LogAnalyticsQueryPack, IO[bytes]], **kwargs: Any ) -> _models.LogAnalyticsQueryPack: """Creates a Log Analytics QueryPack. Note: You cannot specify a different value for @@ -283,18 +268,15 @@ async def create_or_update_without_name( Required. :type resource_group_name: str :param log_analytics_query_pack_payload: Properties that need to be specified to create or - update a Log Analytics QueryPack. Is either a model type or a IO type. Required. + update a Log Analytics QueryPack. Is either a LogAnalyticsQueryPack type or a IO[bytes] type. + Required. :type log_analytics_query_pack_payload: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack - or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + or IO[bytes] :return: LogAnalyticsQueryPack or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -305,34 +287,33 @@ async def create_or_update_without_name( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPack] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(log_analytics_query_pack_payload, (IO, bytes)): + if isinstance(log_analytics_query_pack_payload, (IOBase, bytes)): _content = log_analytics_query_pack_payload else: _json = self._serialize.body(log_analytics_query_pack_payload, "LogAnalyticsQueryPack") - request = build_create_or_update_without_name_request( + _request = build_create_or_update_without_name_request( resource_group_name=resource_group_name, subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, content=_content, - template_url=self.create_or_update_without_name.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -342,16 +323,12 @@ async def create_or_update_without_name( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response) + deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - create_or_update_without_name.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks" - } + return deserialized # type: ignore @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements @@ -364,12 +341,11 @@ async def delete( # pylint: disable=inconsistent-return-statements :type resource_group_name: str :param query_pack_name: The name of the Log Analytics QueryPack resource. Required. 
:type query_pack_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -380,23 +356,22 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, query_pack_name=query_pack_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -407,11 +382,7 @@ async def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}" - } + return cls(pipeline_response, None, {}) # type: ignore @distributed_trace_async async def get(self, resource_group_name: str, query_pack_name: str, **kwargs: Any) -> _models.LogAnalyticsQueryPack: @@ -422,12 +393,11 @@ async def get(self, resource_group_name: str, query_pack_name: str, **kwargs: An :type resource_group_name: str :param query_pack_name: The name of the Log Analytics QueryPack resource. Required. 
:type query_pack_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPack or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -438,23 +408,22 @@ async def get(self, resource_group_name: str, query_pack_name: str, **kwargs: An _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) cls: ClsType[_models.LogAnalyticsQueryPack] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, query_pack_name=query_pack_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -464,16 +433,12 @@ async def get(self, resource_group_name: str, query_pack_name: str, **kwargs: An error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response) + deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}" - } + return deserialized # type: ignore @overload async def create_or_update( @@ -499,7 +464,6 @@ async def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPack or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: @@ -510,7 +474,7 @@ async def create_or_update( self, resource_group_name: str, query_pack_name: str, - log_analytics_query_pack_payload: IO, + log_analytics_query_pack_payload: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -525,11 +489,10 @@ async def create_or_update( :type query_pack_name: str :param log_analytics_query_pack_payload: Properties that need to be specified to create or update a Log Analytics QueryPack. Required. - :type log_analytics_query_pack_payload: IO + :type log_analytics_query_pack_payload: IO[bytes] :keyword content_type: Body Parameter content-type. 
Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPack or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: @@ -540,7 +503,7 @@ async def create_or_update( self, resource_group_name: str, query_pack_name: str, - log_analytics_query_pack_payload: Union[_models.LogAnalyticsQueryPack, IO], + log_analytics_query_pack_payload: Union[_models.LogAnalyticsQueryPack, IO[bytes]], **kwargs: Any ) -> _models.LogAnalyticsQueryPack: """Creates (or updates) a Log Analytics QueryPack. Note: You cannot specify a different value for @@ -552,18 +515,15 @@ async def create_or_update( :param query_pack_name: The name of the Log Analytics QueryPack resource. Required. :type query_pack_name: str :param log_analytics_query_pack_payload: Properties that need to be specified to create or - update a Log Analytics QueryPack. Is either a model type or a IO type. Required. + update a Log Analytics QueryPack. Is either a LogAnalyticsQueryPack type or a IO[bytes] type. + Required. :type log_analytics_query_pack_payload: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack - or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + or IO[bytes] :return: LogAnalyticsQueryPack or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -574,19 +534,19 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPack] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(log_analytics_query_pack_payload, (IO, bytes)): + if isinstance(log_analytics_query_pack_payload, (IOBase, bytes)): _content = log_analytics_query_pack_payload else: _json = self._serialize.body(log_analytics_query_pack_payload, "LogAnalyticsQueryPack") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, query_pack_name=query_pack_name, subscription_id=self._config.subscription_id, @@ -594,15 +554,14 @@ async def create_or_update( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, 
**kwargs ) response = pipeline_response.http_response @@ -612,16 +571,12 @@ async def create_or_update( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response) + deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}" - } + return deserialized # type: ignore @overload async def update_tags( @@ -645,7 +600,6 @@ async def update_tags( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPack or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: @@ -656,7 +610,7 @@ async def update_tags( self, resource_group_name: str, query_pack_name: str, - query_pack_tags: IO, + query_pack_tags: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -669,11 +623,10 @@ async def update_tags( :param query_pack_name: The name of the Log Analytics QueryPack resource. Required. :type query_pack_name: str :param query_pack_tags: Updated tag information to set into the QueryPack instance. Required. - :type query_pack_tags: IO + :type query_pack_tags: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPack or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: @@ -684,7 +637,7 @@ async def update_tags( self, resource_group_name: str, query_pack_name: str, - query_pack_tags: Union[_models.TagsResource, IO], + query_pack_tags: Union[_models.TagsResource, IO[bytes]], **kwargs: Any ) -> _models.LogAnalyticsQueryPack: """Updates an existing QueryPack's tags. To update other fields use the CreateOrUpdate method. @@ -695,17 +648,13 @@ async def update_tags( :param query_pack_name: The name of the Log Analytics QueryPack resource. Required. :type query_pack_name: str :param query_pack_tags: Updated tag information to set into the QueryPack instance. Is either a - model type or a IO type. Required. - :type query_pack_tags: ~azure.mgmt.loganalytics.models.TagsResource or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + TagsResource type or a IO[bytes] type. Required. 
+ :type query_pack_tags: ~azure.mgmt.loganalytics.models.TagsResource or IO[bytes] :return: LogAnalyticsQueryPack or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -716,19 +665,19 @@ async def update_tags( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPack] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(query_pack_tags, (IO, bytes)): + if isinstance(query_pack_tags, (IOBase, bytes)): _content = query_pack_tags else: _json = self._serialize.body(query_pack_tags, "TagsResource") - request = build_update_tags_request( + _request = build_update_tags_request( resource_group_name=resource_group_name, query_pack_name=query_pack_name, subscription_id=self._config.subscription_id, @@ -736,15 +685,14 @@ async def update_tags( content_type=content_type, json=_json, content=_content, - template_url=self.update_tags.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -754,13 +702,9 @@ async def update_tags( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response) + deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - update_tags.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}" - } + return deserialized # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_saved_searches_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_saved_searches_operations.py index 26eada4894c7..1bbf27a480ba 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_saved_searches_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_saved_searches_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
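For orientation on the regenerated QueryPacksOperations above (requests are now built directly as azure.core.rest.HttpRequest objects, without `_convert_request` or the old `.metadata["url"]` templates), here is a small end-to-end usage sketch. It is an assumption-laden illustration rather than code from the patch; the subscription id is a placeholder and azure-identity is assumed to be installed.

# Illustrative usage sketch -- not part of the patch.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with LogAnalyticsManagementClient(credential, "<subscription-id>") as client:
            # list() returns an AsyncItemPaged; pages are fetched lazily by get_next().
            async for query_pack in client.query_packs.list():
                print(query_pack.name)


if __name__ == "__main__":
    asyncio.run(main())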
@@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload +from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -18,14 +19,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._saved_searches_operations import ( build_create_or_update_request, build_delete_request, @@ -33,10 +32,10 @@ build_list_by_workspace_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -73,12 +72,11 @@ async def delete( # pylint: disable=inconsistent-return-statements :type workspace_name: str :param saved_search_id: The id of the saved search. Required. 
:type saved_search_id: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -89,24 +87,23 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, saved_search_id=saved_search_id, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -116,11 +113,7 @@ async def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/savedSearches/{savedSearchId}" - } + return cls(pipeline_response, None, {}) # type: ignore @overload async def create_or_update( @@ -147,7 +140,6 @@ async def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: SavedSearch or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.SavedSearch :raises ~azure.core.exceptions.HttpResponseError: @@ -159,7 +151,7 @@ async def create_or_update( resource_group_name: str, workspace_name: str, saved_search_id: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -174,11 +166,10 @@ async def create_or_update( :param saved_search_id: The id of the saved search. Required. :type saved_search_id: str :param parameters: The parameters required to save a search. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: SavedSearch or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.SavedSearch :raises ~azure.core.exceptions.HttpResponseError: @@ -190,7 +181,7 @@ async def create_or_update( resource_group_name: str, workspace_name: str, saved_search_id: str, - parameters: Union[_models.SavedSearch, IO], + parameters: Union[_models.SavedSearch, IO[bytes]], **kwargs: Any ) -> _models.SavedSearch: """Creates or updates a saved search for a given workspace. @@ -202,18 +193,14 @@ async def create_or_update( :type workspace_name: str :param saved_search_id: The id of the saved search. Required. :type saved_search_id: str - :param parameters: The parameters required to save a search. Is either a model type or a IO - type. Required. - :type parameters: ~azure.mgmt.loganalytics.models.SavedSearch or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: The parameters required to save a search. Is either a SavedSearch type or a + IO[bytes] type. Required. + :type parameters: ~azure.mgmt.loganalytics.models.SavedSearch or IO[bytes] :return: SavedSearch or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.SavedSearch :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -224,19 +211,19 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.SavedSearch] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "SavedSearch") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, saved_search_id=saved_search_id, @@ -245,15 +232,14 @@ async def create_or_update( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -262,16 +248,12 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SavedSearch", pipeline_response) + deserialized = 
self._deserialize("SavedSearch", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/savedSearches/{savedSearchId}" - } + return deserialized # type: ignore @distributed_trace_async async def get( @@ -286,12 +268,11 @@ async def get( :type workspace_name: str :param saved_search_id: The id of the saved search. Required. :type saved_search_id: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: SavedSearch or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.SavedSearch :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -302,24 +283,23 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.SavedSearch] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, workspace_name=workspace_name, saved_search_id=saved_search_id, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -328,16 +308,12 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SavedSearch", pipeline_response) + deserialized = self._deserialize("SavedSearch", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/savedSearches/{savedSearchId}" - } + return deserialized # type: ignore @distributed_trace_async async def list_by_workspace( @@ -350,12 +326,11 @@ async def list_by_workspace( :type resource_group_name: str :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: SavedSearchesListResult or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.SavedSearchesListResult :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -366,23 +341,22 @@ async def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.SavedSearchesListResult] = kwargs.pop("cls", None) - request = build_list_by_workspace_request( + _request = build_list_by_workspace_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_workspace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -391,13 +365,9 @@ async def list_by_workspace( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SavedSearchesListResult", pipeline_response) + deserialized = self._deserialize("SavedSearchesListResult", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - list_by_workspace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/savedSearches" - } + return deserialized # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_schema_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_schema_operations.py index 32ab9266d18b..fb7220cd78cf 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_schema_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_schema_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
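Although the `:keyword callable cls:` lines are dropped from the SavedSearchesOperations docstrings above, the implementations still pop `cls` from kwargs and invoke it as `cls(pipeline_response, deserialized, {})`. A hedged sketch of that hook (placeholder names, illustration only, not part of the patch):

# Illustrative only -- not part of the patch. The third argument the generated
# code passes to the hook is an empty dict, so it is ignored here.
async def get_saved_search_with_status(client, resource_group: str, workspace: str, saved_search_id: str):
    def on_response(pipeline_response, deserialized, _headers):
        # Return any shape the caller wants instead of the plain SavedSearch model.
        return deserialized, pipeline_response.http_response.status_code

    return await client.saved_searches.get(
        resource_group_name=resource_group,
        workspace_name=workspace,
        saved_search_id=saved_search_id,
        cls=on_response,
    )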
# -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, Optional, TypeVar +from typing import Any, Callable, Dict, Optional, Type, TypeVar from azure.core.exceptions import ( ClientAuthenticationError, @@ -18,20 +18,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._schema_operations import build_get_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -66,12 +64,11 @@ async def get( :type resource_group_name: str :param workspace_name: The name of the workspace. Required. :type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: SearchGetSchemaResponse or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.SearchGetSchemaResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -82,23 +79,22 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.SearchGetSchemaResponse] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -107,13 +103,9 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SearchGetSchemaResponse", pipeline_response) + deserialized = self._deserialize("SearchGetSchemaResponse", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - get.metadata = { - "url": 
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/schema" - } + return deserialized # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_shared_keys_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_shared_keys_operations.py index 53753db63415..67c955511b62 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_shared_keys_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_shared_keys_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, Optional, TypeVar +from typing import Any, Callable, Dict, Optional, Type, TypeVar from azure.core.exceptions import ( ClientAuthenticationError, @@ -18,20 +18,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._shared_keys_operations import build_get_shared_keys_request, build_regenerate_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -64,12 +62,11 @@ async def get_shared_keys(self, resource_group_name: str, workspace_name: str, * :type resource_group_name: str :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: SharedKeys or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.SharedKeys :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -80,23 +77,22 @@ async def get_shared_keys(self, resource_group_name: str, workspace_name: str, * _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.SharedKeys] = kwargs.pop("cls", None) - request = build_get_shared_keys_request( + _request = build_get_shared_keys_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get_shared_keys.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -105,16 +101,12 @@ async def get_shared_keys(self, resource_group_name: str, workspace_name: str, * map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SharedKeys", pipeline_response) + deserialized = self._deserialize("SharedKeys", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - get_shared_keys.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/sharedKeys" - } + return deserialized # type: ignore @distributed_trace_async async def regenerate(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _models.SharedKeys: @@ -126,12 +118,11 @@ async def regenerate(self, resource_group_name: str, workspace_name: str, **kwar :type resource_group_name: str :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: SharedKeys or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.SharedKeys :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -142,23 +133,22 @@ async def regenerate(self, resource_group_name: str, workspace_name: str, **kwar _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.SharedKeys] = kwargs.pop("cls", None) - request = build_regenerate_request( + _request = build_regenerate_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.regenerate.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -167,13 +157,9 @@ async def regenerate(self, resource_group_name: str, workspace_name: str, **kwar map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SharedKeys", pipeline_response) + deserialized = self._deserialize("SharedKeys", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - regenerate.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/regenerateSharedKey" - } + return deserialized # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_storage_insight_configs_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_storage_insight_configs_operations.py index cad2ce79f323..7a50bfcce3d3 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_storage_insight_configs_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_storage_insight_configs_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
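The shared-keys hunks above drop the _convert_request shim and the per-method metadata["url"] attributes, type api_version as a plain str, and return the deserialized SharedKeys directly. The public surface is unchanged, so callers still go through the async operation group. A minimal usage sketch (not part of the generated patch) follows; the subscription ID, resource group, and workspace name are placeholders, and the SharedKeys attribute access assumes the model's documented primary/secondary key fields.

# Hedged sketch: calling the async shared-keys operations shown in the diff above.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with LogAnalyticsManagementClient(credential, "<subscription-id>") as client:
            # GET .../workspaces/{workspaceName}/sharedKeys
            keys = await client.shared_keys.get_shared_keys(
                resource_group_name="my-rg", workspace_name="my-workspace"
            )
            print(keys.primary_shared_key)
            # POST .../workspaces/{workspaceName}/regenerateSharedKey
            await client.shared_keys.regenerate(
                resource_group_name="my-rg", workspace_name="my-workspace"
            )


asyncio.run(main())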
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -19,15 +20,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._storage_insight_configs_operations import ( build_create_or_update_request, build_delete_request, @@ -35,10 +34,10 @@ build_list_by_workspace_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -87,7 +86,6 @@ async def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: StorageInsight or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.StorageInsight :raises ~azure.core.exceptions.HttpResponseError: @@ -99,7 +97,7 @@ async def create_or_update( resource_group_name: str, workspace_name: str, storage_insight_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -114,11 +112,10 @@ async def create_or_update( :param storage_insight_name: Name of the storageInsightsConfigs resource. Required. :type storage_insight_name: str :param parameters: The parameters required to create or update a storage insight. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: StorageInsight or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.StorageInsight :raises ~azure.core.exceptions.HttpResponseError: @@ -130,7 +127,7 @@ async def create_or_update( resource_group_name: str, workspace_name: str, storage_insight_name: str, - parameters: Union[_models.StorageInsight, IO], + parameters: Union[_models.StorageInsight, IO[bytes]], **kwargs: Any ) -> _models.StorageInsight: """Create or update a storage insight. @@ -143,17 +140,13 @@ async def create_or_update( :param storage_insight_name: Name of the storageInsightsConfigs resource. Required. 
:type storage_insight_name: str :param parameters: The parameters required to create or update a storage insight. Is either a - model type or a IO type. Required. - :type parameters: ~azure.mgmt.loganalytics.models.StorageInsight or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + StorageInsight type or a IO[bytes] type. Required. + :type parameters: ~azure.mgmt.loganalytics.models.StorageInsight or IO[bytes] :return: StorageInsight or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.StorageInsight :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -164,19 +157,19 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.StorageInsight] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "StorageInsight") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, storage_insight_name=storage_insight_name, @@ -185,15 +178,14 @@ async def create_or_update( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -202,21 +194,13 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("StorageInsight", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("StorageInsight", pipeline_response) + deserialized = self._deserialize("StorageInsight", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/storageInsightConfigs/{storageInsightName}" - } - @distributed_trace_async async def get( self, resource_group_name: str, workspace_name: str, storage_insight_name: str, **kwargs: Any @@ -230,12 +214,11 @@ async def get( :type workspace_name: str :param 
storage_insight_name: Name of the storageInsightsConfigs resource. Required. :type storage_insight_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: StorageInsight or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.StorageInsight :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -246,24 +229,23 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.StorageInsight] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, workspace_name=workspace_name, storage_insight_name=storage_insight_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -272,16 +254,12 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("StorageInsight", pipeline_response) + deserialized = self._deserialize("StorageInsight", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/storageInsightConfigs/{storageInsightName}" - } + return deserialized # type: ignore @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements @@ -296,12 +274,11 @@ async def delete( # pylint: disable=inconsistent-return-statements :type workspace_name: str :param storage_insight_name: Name of the storageInsightsConfigs resource. Required. 
:type storage_insight_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -312,24 +289,23 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, storage_insight_name=storage_insight_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -339,11 +315,7 @@ async def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/storageInsightConfigs/{storageInsightName}" - } + return cls(pipeline_response, None, {}) # type: ignore @distributed_trace def list_by_workspace( @@ -356,7 +328,6 @@ def list_by_workspace( :type resource_group_name: str :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either StorageInsight or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.loganalytics.models.StorageInsight] :raises ~azure.core.exceptions.HttpResponseError: @@ -364,10 +335,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.StorageInsightListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -378,24 +349,21 @@ def list_by_workspace( def prepare_request(next_link=None): if not next_link: - request = build_list_by_workspace_request( + _request = build_list_by_workspace_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_workspace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("StorageInsightListResult", pipeline_response) @@ -405,10 +373,11 @@ async def extract_data(pipeline_response): return deserialized.odata_next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -419,7 +388,3 @@ async def get_next(next_link=None): return pipeline_response return AsyncItemPaged(get_next, extract_data) - - list_by_workspace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/storageInsightConfigs" - } diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_tables_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_tables_operations.py index f4351aae3024..1189e01c9d95 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_tables_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_tables_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
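The storage-insight-configs hunks above switch binary bodies from IO to IO[bytes], test isinstance(parameters, (IOBase, bytes)), and page through list_by_workspace without _convert_request or template_url metadata. A hedged usage sketch (not part of the generated patch) of create_or_update plus paging follows; all resource names, IDs, keys, and container names are placeholders, and the StorageInsight/StorageAccount field names are assumptions based on the models module referenced in the diff.

# Hedged sketch: exercising the async storage insight config operations.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient
from azure.mgmt.loganalytics.models import StorageAccount, StorageInsight


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with LogAnalyticsManagementClient(credential, "<subscription-id>") as client:
            insight = await client.storage_insight_configs.create_or_update(
                resource_group_name="my-rg",
                workspace_name="my-workspace",
                storage_insight_name="my-insight",
                parameters=StorageInsight(
                    storage_account=StorageAccount(id="<storage-account-resource-id>", key="<key>"),
                    containers=["wad-iis-logfiles"],
                ),
            )
            print(insight.name)
            # list_by_workspace returns an AsyncItemPaged; iterate it, do not await the call itself.
            async for item in client.storage_insight_configs.list_by_workspace(
                resource_group_name="my-rg", workspace_name="my-workspace"
            ):
                print(item.name)


asyncio.run(main())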
@@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -16,12 +17,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -29,7 +31,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request from ...operations._tables_operations import ( build_cancel_search_request, build_create_or_update_request, @@ -40,10 +41,10 @@ build_update_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -78,7 +79,6 @@ def list_by_workspace( :type resource_group_name: str :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Table or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.loganalytics.models.Table] :raises ~azure.core.exceptions.HttpResponseError: @@ -86,10 +86,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[_models.TablesListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -100,24 +100,21 @@ def list_by_workspace( def prepare_request(next_link=None): if not next_link: - request = build_list_by_workspace_request( + _request = build_list_by_workspace_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_workspace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("TablesListResult", pipeline_response) @@ -127,10 +124,11 @@ async def extract_data(pipeline_response): return None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -143,19 +141,15 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list_by_workspace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables" - } - async def _create_or_update_initial( self, resource_group_name: str, workspace_name: str, table_name: str, - parameters: Union[_models.Table, IO], + parameters: Union[_models.Table, IO[bytes]], **kwargs: Any - ) -> Optional[_models.Table]: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -166,19 +160,19 @@ async def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", 
_headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.Table]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "Table") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, table_name=table_name, @@ -187,36 +181,34 @@ async def _create_or_update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._create_or_update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("Table", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - _create_or_update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}" - } + return deserialized # type: ignore @overload async def begin_create_or_update( @@ -243,14 +235,6 @@ async def begin_create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either Table or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.loganalytics.models.Table] :raises ~azure.core.exceptions.HttpResponseError: @@ -262,7 +246,7 @@ async def begin_create_or_update( resource_group_name: str, workspace_name: str, table_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -277,18 +261,10 @@ async def begin_create_or_update( :param table_name: The name of the table. Required. :type table_name: str :param parameters: The parameters required to update table properties. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either Table or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.loganalytics.models.Table] :raises ~azure.core.exceptions.HttpResponseError: @@ -300,7 +276,7 @@ async def begin_create_or_update( resource_group_name: str, workspace_name: str, table_name: str, - parameters: Union[_models.Table, IO], + parameters: Union[_models.Table, IO[bytes]], **kwargs: Any ) -> AsyncLROPoller[_models.Table]: """Update or Create a Log Analytics workspace table. @@ -312,20 +288,9 @@ async def begin_create_or_update( :type workspace_name: str :param table_name: The name of the table. Required. :type table_name: str - :param parameters: The parameters required to update table properties. Is either a model type - or a IO type. Required. - :type parameters: ~azure.mgmt.loganalytics.models.Table or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: The parameters required to update table properties. Is either a Table type + or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.loganalytics.models.Table or IO[bytes] :return: An instance of AsyncLROPoller that returns either Table or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.loganalytics.models.Table] :raises ~azure.core.exceptions.HttpResponseError: @@ -333,7 +298,7 @@ async def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Table] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -352,12 +317,13 @@ async def begin_create_or_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("Table", pipeline_response) + deserialized = self._deserialize("Table", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -370,27 +336,25 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[_models.Table].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}" - } + return AsyncLROPoller[_models.Table]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) async def _update_initial( self, resource_group_name: str, workspace_name: str, table_name: str, - parameters: Union[_models.Table, IO], + parameters: Union[_models.Table, IO[bytes]], **kwargs: Any - ) -> Optional[_models.Table]: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -401,19 +365,19 @@ async def _update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.Table]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "Table") - request = build_update_request( + _request = build_update_request( 
resource_group_name=resource_group_name, workspace_name=workspace_name, table_name=table_name, @@ -422,36 +386,34 @@ async def _update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("Table", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - _update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}" - } + return deserialized # type: ignore @overload async def begin_update( @@ -478,14 +440,6 @@ async def begin_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either Table or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.loganalytics.models.Table] :raises ~azure.core.exceptions.HttpResponseError: @@ -497,7 +451,7 @@ async def begin_update( resource_group_name: str, workspace_name: str, table_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -512,18 +466,10 @@ async def begin_update( :param table_name: The name of the table. Required. :type table_name: str :param parameters: The parameters required to update table properties. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either Table or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.loganalytics.models.Table] :raises ~azure.core.exceptions.HttpResponseError: @@ -535,7 +481,7 @@ async def begin_update( resource_group_name: str, workspace_name: str, table_name: str, - parameters: Union[_models.Table, IO], + parameters: Union[_models.Table, IO[bytes]], **kwargs: Any ) -> AsyncLROPoller[_models.Table]: """Update a Log Analytics workspace table. @@ -547,20 +493,9 @@ async def begin_update( :type workspace_name: str :param table_name: The name of the table. Required. :type table_name: str - :param parameters: The parameters required to update table properties. Is either a model type - or a IO type. Required. - :type parameters: ~azure.mgmt.loganalytics.models.Table or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: The parameters required to update table properties. Is either a Table type + or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.loganalytics.models.Table or IO[bytes] :return: An instance of AsyncLROPoller that returns either Table or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.loganalytics.models.Table] :raises ~azure.core.exceptions.HttpResponseError: @@ -568,7 +503,7 @@ async def begin_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Table] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -587,12 +522,13 @@ async def begin_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("Table", pipeline_response) + deserialized = self._deserialize("Table", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -605,17 +541,15 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[_models.Table].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}" - } + return AsyncLROPoller[_models.Table]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) @distributed_trace_async async def get(self, resource_group_name: str, workspace_name: str, table_name: str, **kwargs: Any) -> _models.Table: @@ -628,12 +562,11 @@ async def get(self, resource_group_name: str, workspace_name: str, table_name: s :type workspace_name: str :param table_name: The name of the table. Required. 
:type table_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Table or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.Table :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -644,24 +577,23 @@ async def get(self, resource_group_name: str, workspace_name: str, table_name: s _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[_models.Table] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, workspace_name=workspace_name, table_name=table_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -671,21 +603,17 @@ async def get(self, resource_group_name: str, workspace_name: str, table_name: s error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("Table", pipeline_response) + deserialized = self._deserialize("Table", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}" - } + return deserialized # type: ignore - async def _delete_initial( # pylint: disable=inconsistent-return-statements + async def _delete_initial( self, resource_group_name: str, workspace_name: str, table_name: str, **kwargs: Any - ) -> None: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -696,39 +624,43 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) - cls: ClsType[None] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, table_name=table_name, subscription_id=self._config.subscription_id, api_version=api_version, - 
template_url=self._delete_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _delete_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}" - } + return deserialized # type: ignore @distributed_trace_async async def begin_delete( @@ -743,14 +675,6 @@ async def begin_delete( :type workspace_name: str :param table_name: The name of the table. Required. :type table_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -758,13 +682,13 @@ async def begin_delete( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( # type: ignore + raw_result = await self._delete_initial( resource_group_name=resource_group_name, workspace_name=workspace_name, table_name=table_name, @@ -774,11 +698,12 @@ async def begin_delete( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: AsyncPollingMethod = cast( @@ -790,17 +715,13 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}" - } + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore @distributed_trace_async async def migrate( # pylint: disable=inconsistent-return-statements @@ -816,12 +737,11 @@ async def migrate( # pylint: disable=inconsistent-return-statements :type workspace_name: str :param table_name: The name of the table. Required. 
:type table_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -832,24 +752,23 @@ async def migrate( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_migrate_request( + _request = build_migrate_request( resource_group_name=resource_group_name, workspace_name=workspace_name, table_name=table_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.migrate.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -860,11 +779,7 @@ async def migrate( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - migrate.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}/migrate" - } + return cls(pipeline_response, None, {}) # type: ignore @distributed_trace_async async def cancel_search( # pylint: disable=inconsistent-return-statements @@ -879,12 +794,11 @@ async def cancel_search( # pylint: disable=inconsistent-return-statements :type workspace_name: str :param table_name: The name of the table. Required. 
:type table_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -895,24 +809,23 @@ async def cancel_search( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_cancel_search_request( + _request = build_cancel_search_request( resource_group_name=resource_group_name, workspace_name=workspace_name, table_name=table_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.cancel_search.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -923,8 +836,4 @@ async def cancel_search( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - cancel_search.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}/cancelSearch" - } + return cls(pipeline_response, None, {}) # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_usages_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_usages_operations.py index 846472fb2fc0..380bde0cefff 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_usages_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_usages_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
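In the tables hunks above, the private _create_or_update_initial, _update_initial, and _delete_initial helpers now stream the raw response (AsyncIterator[bytes]) and the pollers are parameterized as AsyncLROPoller[_models.Table] or AsyncLROPoller[None]; from the caller's side the begin_* methods are awaited for a poller and the result is awaited from it, as before. A hedged sketch (not part of the generated patch) follows; names and the retention value are placeholders, and the Table model's retention_in_days field is an assumption based on the models referenced in the diff.

# Hedged sketch: driving the long-running table operations through the public poller API.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient
from azure.mgmt.loganalytics.models import Table


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with LogAnalyticsManagementClient(credential, "<subscription-id>") as client:
            poller = await client.tables.begin_create_or_update(
                resource_group_name="my-rg",
                workspace_name="my-workspace",
                table_name="MyCustomTable_CL",
                parameters=Table(retention_in_days=30),
            )
            table = await poller.result()  # AsyncLROPoller[Table] resolves to the Table model
            print(table.name, table.retention_in_days)

            delete_poller = await client.tables.begin_delete(
                resource_group_name="my-rg",
                workspace_name="my-workspace",
                table_name="MyCustomTable_CL",
            )
            await delete_poller.result()  # AsyncLROPoller[None]; completes with no payload


asyncio.run(main())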
# -------------------------------------------------------------------------- import sys -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, Optional, Type, TypeVar from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -19,20 +19,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._usages_operations import build_list_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -67,7 +65,6 @@ def list( :type resource_group_name: str :param workspace_name: The name of the workspace. Required. :type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either UsageMetric or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.loganalytics.models.UsageMetric] :raises ~azure.core.exceptions.HttpResponseError: @@ -75,10 +72,10 @@ def list( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.WorkspaceListUsagesResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -89,24 +86,21 @@ def list( def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("WorkspaceListUsagesResult", pipeline_response) @@ -116,10 +110,11 @@ async def extract_data(pipeline_response): return None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = 
prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -130,7 +125,3 @@ async def get_next(next_link=None): return pipeline_response return AsyncItemPaged(get_next, extract_data) - - list.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/usages" - } diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_workspace_purge_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_workspace_purge_operations.py index 2e272a393e5a..f988193e51e6 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_workspace_purge_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_workspace_purge_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload +from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -18,20 +19,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._workspace_purge_operations import build_get_purge_status_request, build_purge_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -87,7 +86,6 @@ async def purge( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: WorkspacePurgeResponse or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.WorkspacePurgeResponse :raises ~azure.core.exceptions.HttpResponseError: @@ -98,7 +96,7 @@ async def purge( self, resource_group_name: str, workspace_name: str, - body: IO, + body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -121,11 +119,10 @@ async def purge( :type workspace_name: str :param body: Describes the body of a request to purge data in a single table of an Log Analytics Workspace. Required. - :type body: IO + :type body: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: WorkspacePurgeResponse or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.WorkspacePurgeResponse :raises ~azure.core.exceptions.HttpResponseError: @@ -133,7 +130,11 @@ async def purge( @distributed_trace_async async def purge( - self, resource_group_name: str, workspace_name: str, body: Union[_models.WorkspacePurgeBody, IO], **kwargs: Any + self, + resource_group_name: str, + workspace_name: str, + body: Union[_models.WorkspacePurgeBody, IO[bytes]], + **kwargs: Any ) -> _models.WorkspacePurgeResponse: """Purges data in an Log Analytics workspace by a set of user-defined filters. @@ -152,17 +153,13 @@ async def purge( :param workspace_name: The name of the workspace. Required. :type workspace_name: str :param body: Describes the body of a request to purge data in a single table of an Log - Analytics Workspace. Is either a model type or a IO type. Required. - :type body: ~azure.mgmt.loganalytics.models.WorkspacePurgeBody or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + Analytics Workspace. Is either a WorkspacePurgeBody type or a IO[bytes] type. Required. 
+ :type body: ~azure.mgmt.loganalytics.models.WorkspacePurgeBody or IO[bytes] :return: WorkspacePurgeResponse or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.WorkspacePurgeResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -173,19 +170,19 @@ async def purge( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.WorkspacePurgeResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(body, (IO, bytes)): + if isinstance(body, (IOBase, bytes)): _content = body else: _json = self._serialize.body(body, "WorkspacePurgeBody") - request = build_purge_request( + _request = build_purge_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, @@ -193,15 +190,14 @@ async def purge( content_type=content_type, json=_json, content=_content, - template_url=self.purge.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -215,16 +211,12 @@ async def purge( "str", response.headers.get("x-ms-status-location") ) - deserialized = self._deserialize("WorkspacePurgeResponse", pipeline_response) + deserialized = self._deserialize("WorkspacePurgeResponse", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized + return cls(pipeline_response, deserialized, response_headers) # type: ignore - purge.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/purge" - } + return deserialized # type: ignore @distributed_trace_async async def get_purge_status( @@ -240,12 +232,11 @@ async def get_purge_status( :param purge_id: In a purge status request, this is the Id of the operation the status of which is returned. Required. 
:type purge_id: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: WorkspacePurgeStatusResponse or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.WorkspacePurgeStatusResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -256,24 +247,23 @@ async def get_purge_status( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.WorkspacePurgeStatusResponse] = kwargs.pop("cls", None) - request = build_get_purge_status_request( + _request = build_get_purge_status_request( resource_group_name=resource_group_name, workspace_name=workspace_name, purge_id=purge_id, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get_purge_status.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -282,13 +272,9 @@ async def get_purge_status( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("WorkspacePurgeStatusResponse", pipeline_response) + deserialized = self._deserialize("WorkspacePurgeStatusResponse", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get_purge_status.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/operations/{purgeId}" - } + return deserialized # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_workspaces_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_workspaces_operations.py index f7fc906c1fbb..3dd7bac43089 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_workspaces_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_workspaces_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
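As a rough usage sketch of the purge and get_purge_status operations updated above: the operation signatures match the hunks, but the WorkspacePurgeBodyFilters model and the `operation_id`/`status` attribute names are assumptions about the models module, not taken from this patch.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient
from azure.mgmt.loganalytics import models


async def purge_old_heartbeats() -> None:
    async with DefaultAzureCredential() as credential:
        async with LogAnalyticsManagementClient(credential, "<subscription-id>") as client:
            # Assumed WorkspacePurgeBody shape: a target table plus column filters.
            body = models.WorkspacePurgeBody(
                table="Heartbeat",
                filters=[
                    models.WorkspacePurgeBodyFilters(
                        column="TimeGenerated", operator="<", value="2024-01-01T00:00:00Z"
                    )
                ],
            )
            purge = await client.workspace_purge.purge("<resource-group>", "<workspace-name>", body)
            # Assumed attributes: operation_id on the purge response, status on the status response.
            status = await client.workspace_purge.get_purge_status(
                "<resource-group>", "<workspace-name>", purge.operation_id
            )
            print(status.status)


asyncio.run(purge_old_heartbeats())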
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -16,12 +17,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -29,7 +31,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request from ...operations._workspaces_operations import ( build_create_or_update_request, build_delete_request, @@ -39,10 +40,10 @@ build_update_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -70,7 +71,6 @@ def __init__(self, *args, **kwargs) -> None: def list(self, **kwargs: Any) -> AsyncIterable["_models.Workspace"]: """Gets the workspaces in a subscription. 
- :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Workspace or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.loganalytics.models.Workspace] :raises ~azure.core.exceptions.HttpResponseError: @@ -78,10 +78,10 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.Workspace"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -92,22 +92,19 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.Workspace"]: def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("WorkspaceListResult", pipeline_response) @@ -117,10 +114,11 @@ async def extract_data(pipeline_response): return None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -133,8 +131,6 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.OperationalInsights/workspaces"} - @distributed_trace def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> AsyncIterable["_models.Workspace"]: """Gets workspaces in a resource group. @@ -142,7 +138,6 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy :param resource_group_name: The name of the resource group. The name is case insensitive. Required. 
:type resource_group_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Workspace or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.loganalytics.models.Workspace] :raises ~azure.core.exceptions.HttpResponseError: @@ -150,10 +145,10 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -164,23 +159,20 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy def prepare_request(next_link=None): if not next_link: - request = build_list_by_resource_group_request( + _request = build_list_by_resource_group_request( resource_group_name=resource_group_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_resource_group.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("WorkspaceListResult", pipeline_response) @@ -190,10 +182,11 @@ async def extract_data(pipeline_response): return None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -206,14 +199,14 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list_by_resource_group.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces" - } - async def _create_or_update_initial( - self, resource_group_name: str, workspace_name: str, parameters: Union[_models.Workspace, IO], **kwargs: Any - ) -> Optional[_models.Workspace]: - error_map = { + self, + resource_group_name: str, + workspace_name: str, + parameters: Union[_models.Workspace, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -224,19 +217,19 @@ async def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", 
_params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.Workspace]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "Workspace") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, @@ -244,39 +237,34 @@ async def _create_or_update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._create_or_update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 201, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("Workspace", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("Workspace", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - _create_or_update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}" - } + return deserialized # type: ignore @overload async def begin_create_or_update( @@ -300,14 +288,6 @@ async def begin_create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
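A minimal sketch of driving the begin_create_or_update and begin_delete pollers whose hunks appear above and below in this file. The location, SKU name, and the `provisioning_state` attribute are assumptions rather than values taken from this patch; the LRO call shapes follow the signatures shown here.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient
from azure.mgmt.loganalytics import models


async def create_then_delete_workspace() -> None:
    async with DefaultAzureCredential() as credential:
        async with LogAnalyticsManagementClient(credential, "<subscription-id>") as client:
            poller = await client.workspaces.begin_create_or_update(
                "<resource-group>",
                "<workspace-name>",
                # Assumed model shape: a tracked resource with a required location and optional SKU.
                models.Workspace(location="eastus", sku=models.WorkspaceSku(name="PerGB2018")),
            )
            workspace = await poller.result()  # AsyncLROPoller[Workspace]
            print(workspace.provisioning_state)

            # force=False keeps the workspace recoverable during the soft-delete window,
            # per the begin_delete docstring below.
            delete_poller = await client.workspaces.begin_delete(
                "<resource-group>", "<workspace-name>", force=False
            )
            await delete_poller.result()


asyncio.run(create_then_delete_workspace())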
:return: An instance of AsyncLROPoller that returns either Workspace or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.loganalytics.models.Workspace] @@ -319,7 +299,7 @@ async def begin_create_or_update( self, resource_group_name: str, workspace_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -332,18 +312,10 @@ async def begin_create_or_update( :param workspace_name: The name of the workspace. Required. :type workspace_name: str :param parameters: The parameters required to create or update a workspace. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either Workspace or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.loganalytics.models.Workspace] @@ -352,7 +324,11 @@ async def begin_create_or_update( @distributed_trace_async async def begin_create_or_update( - self, resource_group_name: str, workspace_name: str, parameters: Union[_models.Workspace, IO], **kwargs: Any + self, + resource_group_name: str, + workspace_name: str, + parameters: Union[_models.Workspace, IO[bytes]], + **kwargs: Any ) -> AsyncLROPoller[_models.Workspace]: """Create or update a workspace. @@ -361,20 +337,9 @@ async def begin_create_or_update( :type resource_group_name: str :param workspace_name: The name of the workspace. Required. :type workspace_name: str - :param parameters: The parameters required to create or update a workspace. Is either a model - type or a IO type. Required. - :type parameters: ~azure.mgmt.loganalytics.models.Workspace or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: The parameters required to create or update a workspace. Is either a + Workspace type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.loganalytics.models.Workspace or IO[bytes] :return: An instance of AsyncLROPoller that returns either Workspace or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.loganalytics.models.Workspace] @@ -383,7 +348,7 @@ async def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -401,12 +366,13 @@ async def begin_create_or_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("Workspace", pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -416,22 +382,20 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[_models.Workspace].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}" - } + return AsyncLROPoller[_models.Workspace]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) - async def _delete_initial( # pylint: disable=inconsistent-return-statements + async def _delete_initial( self, resource_group_name: str, workspace_name: str, force: Optional[bool] = None, **kwargs: Any - ) -> None: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -442,39 +406,43 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) - cls: ClsType[None] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, force=force, api_version=api_version, - template_url=self._delete_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = 
self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _delete_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}" - } + return deserialized # type: ignore @distributed_trace_async async def begin_delete( @@ -493,14 +461,6 @@ async def begin_delete( :param force: Deletes the workspace without the recovery option. A workspace that was deleted with this flag cannot be recovered. Default value is None. :type force: bool - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -508,13 +468,13 @@ async def begin_delete( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( # type: ignore + raw_result = await self._delete_initial( resource_group_name=resource_group_name, workspace_name=workspace_name, force=force, @@ -524,11 +484,12 @@ async def begin_delete( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) @@ -537,17 +498,13 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}" - } + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore @distributed_trace_async async def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _models.Workspace: @@ -558,12 +515,11 @@ async def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any :type resource_group_name: str :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Workspace or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.Workspace :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -574,23 +530,22 @@ async def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -600,16 +555,12 @@ async def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("Workspace", pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}" - } + return deserialized # type: ignore @overload async def update( @@ -633,7 +584,6 @@ async def update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Workspace or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.Workspace :raises ~azure.core.exceptions.HttpResponseError: @@ -644,7 +594,7 @@ async def update( self, resource_group_name: str, workspace_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -657,11 +607,10 @@ async def update( :param workspace_name: The name of the workspace. Required. :type workspace_name: str :param parameters: The parameters required to patch a workspace. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Workspace or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.Workspace :raises ~azure.core.exceptions.HttpResponseError: @@ -672,7 +621,7 @@ async def update( self, resource_group_name: str, workspace_name: str, - parameters: Union[_models.WorkspacePatch, IO], + parameters: Union[_models.WorkspacePatch, IO[bytes]], **kwargs: Any ) -> _models.Workspace: """Updates a workspace. @@ -682,18 +631,14 @@ async def update( :type resource_group_name: str :param workspace_name: The name of the workspace. Required. :type workspace_name: str - :param parameters: The parameters required to patch a workspace. Is either a model type or a IO - type. Required. - :type parameters: ~azure.mgmt.loganalytics.models.WorkspacePatch or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: The parameters required to patch a workspace. Is either a WorkspacePatch + type or a IO[bytes] type. Required. + :type parameters: ~azure.mgmt.loganalytics.models.WorkspacePatch or IO[bytes] :return: Workspace or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.Workspace :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -704,19 +649,19 @@ async def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "WorkspacePatch") - request = build_update_request( + _request = build_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, @@ -724,15 +669,14 @@ async def update( content_type=content_type, json=_json, content=_content, - template_url=self.update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -742,13 +686,9 @@ async def update( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("Workspace", pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response.http_response) if cls: - return 
cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}" - } + return deserialized # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/__init__.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/__init__.py index 7e7d116ee362..7fedf4cf528b 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/__init__.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/__init__.py @@ -39,6 +39,7 @@ from ._models_py3 import LogAnalyticsQueryPackQueryPropertiesRelated from ._models_py3 import LogAnalyticsQueryPackQuerySearchProperties from ._models_py3 import LogAnalyticsQueryPackQuerySearchPropertiesRelated +from ._models_py3 import ManagedServiceIdentity from ._models_py3 import ManagementGroup from ._models_py3 import MetricName from ._models_py3 import Operation @@ -73,6 +74,7 @@ from ._models_py3 import TagsResource from ._models_py3 import TrackedResource from ._models_py3 import UsageMetric +from ._models_py3 import UserAssignedIdentity from ._models_py3 import UserIdentityProperties from ._models_py3 import Workspace from ._models_py3 import WorkspaceCapping @@ -100,6 +102,7 @@ from ._log_analytics_management_client_enums import DataSourceType from ._log_analytics_management_client_enums import IdentityType from ._log_analytics_management_client_enums import LinkedServiceEntityStatus +from ._log_analytics_management_client_enums import ManagedServiceIdentityType from ._log_analytics_management_client_enums import ProvisioningStateEnum from ._log_analytics_management_client_enums import PublicNetworkAccessType from ._log_analytics_management_client_enums import PurgeState @@ -151,6 +154,7 @@ "LogAnalyticsQueryPackQueryPropertiesRelated", "LogAnalyticsQueryPackQuerySearchProperties", "LogAnalyticsQueryPackQuerySearchPropertiesRelated", + "ManagedServiceIdentity", "ManagementGroup", "MetricName", "Operation", @@ -185,6 +189,7 @@ "TagsResource", "TrackedResource", "UsageMetric", + "UserAssignedIdentity", "UserIdentityProperties", "Workspace", "WorkspaceCapping", @@ -211,6 +216,7 @@ "DataSourceType", "IdentityType", "LinkedServiceEntityStatus", + "ManagedServiceIdentityType", "ProvisioningStateEnum", "PublicNetworkAccessType", "PurgeState", diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_log_analytics_management_client_enums.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_log_analytics_management_client_enums.py index bac07e0c9c32..612e6e0c9aa8 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_log_analytics_management_client_enums.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_log_analytics_management_client_enums.py @@ -21,12 +21,19 @@ class BillingType(str, Enum, metaclass=CaseInsensitiveEnumMeta): class Capacity(int, Enum, metaclass=CaseInsensitiveEnumMeta): - """The capacity value.""" + """The capacity reservation level in Gigabytes for this cluster.""" + ONE_HUNDRED = 100 + TWO_HUNDRED = 200 + THREE_HUNDRED = 300 + FOUR_HUNDRED = 400 FIVE_HUNDRED = 500 TEN_HUNDRED = 1000 TWO_THOUSAND = 2000 FIVE_THOUSAND = 5000 + TEN_THOUSAND = 10000 + TWENTY_FIVE_THOUSAND = 25000 + FIFTY_THOUSAND = 50000 class 
CapacityReservationLevel(int, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -57,7 +64,7 @@ class ClusterEntityStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): class ClusterSkuNameEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The name of the SKU.""" + """The SKU (tier) of a cluster.""" CAPACITY_RESERVATION = "CapacityReservation" @@ -65,16 +72,16 @@ class ClusterSkuNameEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): class ColumnDataTypeHintEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Column data type logical hint.""" - #: A string that matches the pattern of a URI, for example, - #: scheme://username:password@host:1234/this/is/a/path?k1=v1&k2=v2#fragment URI = "uri" - #: A standard 128-bit GUID following the standard shape, xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + """A string that matches the pattern of a URI, for example, + scheme://username:password@host:1234/this/is/a/path?k1=v1&k2=v2#fragment""" GUID = "guid" - #: An Azure Resource Model (ARM) path: - #: /subscriptions/{...}/resourceGroups/{...}/providers/Microsoft.{...}/{...}/{...}/{...}... + """A standard 128-bit GUID following the standard shape, xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx""" ARM_PATH = "armPath" - #: A standard V4/V6 ip address following the standard shape, x.x.x.x/y:y:y:y:y:y:y:y + """An Azure Resource Model (ARM) path: + /subscriptions/{...}/resourceGroups/{...}/providers/Microsoft.{...}/{...}/{...}/{...}...""" IP = "ip" + """A standard V4/V6 ip address following the standard shape, x.x.x.x/y:y:y:y:y:y:y:y""" class ColumnTypeEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -88,6 +95,7 @@ class ColumnTypeEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): DATE_TIME = "dateTime" GUID = "guid" DYNAMIC = "dynamic" + INT_ENUM = "int" class CreatedByType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -102,18 +110,18 @@ class CreatedByType(str, Enum, metaclass=CaseInsensitiveEnumMeta): class DataIngestionStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The status of data ingestion for this workspace.""" - #: Ingestion enabled following daily cap quota reset, or subscription enablement. RESPECT_QUOTA = "RespectQuota" - #: Ingestion started following service setting change. + """Ingestion enabled following daily cap quota reset, or subscription enablement.""" FORCE_ON = "ForceOn" - #: Ingestion stopped following service setting change. + """Ingestion started following service setting change.""" FORCE_OFF = "ForceOff" - #: Reached daily cap quota, ingestion stopped. + """Ingestion stopped following service setting change.""" OVER_QUOTA = "OverQuota" - #: Ingestion stopped following suspended subscription. + """Reached daily cap quota, ingestion stopped.""" SUBSCRIPTION_SUSPENDED = "SubscriptionSuspended" - #: 80% of daily cap quota reached. + """Ingestion stopped following suspended subscription.""" APPROACHING_QUOTA = "ApproachingQuota" + """80% of daily cap quota reached.""" class DataSourceKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -185,27 +193,40 @@ class LinkedServiceEntityStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): UPDATING = "Updating" +class ManagedServiceIdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of managed service identity (where both SystemAssigned and UserAssigned types are + allowed). 
+ """ + + NONE = "None" + SYSTEM_ASSIGNED = "SystemAssigned" + USER_ASSIGNED = "UserAssigned" + SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned" + + class ProvisioningStateEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Table's current provisioning state. If set to 'updating', indicates a resource lock due to ongoing operation, forbidding any update to the table until the ongoing operation is concluded. """ - #: Table schema is still being built and updated, table is currently locked for any changes till - #: the procedure is done. UPDATING = "Updating" - #: Table schema is stable and without changes, table data is being updated. + """Table schema is still being built and updated, table is currently locked for any changes till + the procedure is done.""" IN_PROGRESS = "InProgress" - #: Table state is stable and without changes, table is unlocked and open for new updates. + """Table schema is stable and without changes, table data is being updated.""" SUCCEEDED = "Succeeded" + """Table state is stable and without changes, table is unlocked and open for new updates.""" + DELETING = "Deleting" + """Table state is deleting.""" class PublicNetworkAccessType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The network access type for operating on the Log Analytics Workspace. By default it is Enabled.""" - #: Enables connectivity to Log Analytics through public DNS. ENABLED = "Enabled" - #: Disables public connectivity to Log Analytics through public DNS. + """Enables connectivity to Log Analytics through public DNS.""" DISABLED = "Disabled" + """Disables public connectivity to Log Analytics through public DNS.""" class PurgeState(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -237,11 +258,11 @@ class SkuNameEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): class SourceEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Table's creator.""" - #: Tables provisioned by the system, as collected via Diagnostic Settings, the Agents, or any - #: other standard data collection means. MICROSOFT = "microsoft" - #: Tables created by the owner of the Workspace, and only found in this Workspace. + """Tables provisioned by the system, as collected via Diagnostic Settings, the Agents, or any + other standard data collection means.""" CUSTOMER = "customer" + """Tables created by the owner of the Workspace, and only found in this Workspace.""" class StorageInsightState(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -254,10 +275,10 @@ class StorageInsightState(str, Enum, metaclass=CaseInsensitiveEnumMeta): class TablePlanEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Instruct the system how to handle and charge the logs ingested to this table.""" - #: Logs that are adjusted to support high volume low value verbose logs. BASIC = "Basic" - #: Logs that allow monitoring and analytics. + """Logs that are adjusted to support high volume low value verbose logs.""" ANALYTICS = "Analytics" + """Logs that allow monitoring and analytics.""" class TableSubTypeEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -265,28 +286,28 @@ class TableSubTypeEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): available against it. """ - #: The default subtype with which built-in tables are created. ANY = "Any" - #: Indicates a table created through the Data Collector API or with the custom logs feature of the - #: MMA agent, or any table against which Custom Fields were created. 
+ """The default subtype with which built-in tables are created.""" CLASSIC = "Classic" - #: A table eligible to have data sent into it via any of the means supported by Data Collection - #: Rules: the Data Collection Endpoint API, ingestion-time transformations, or any other mechanism - #: provided by Data Collection Rules + """Indicates a table created through the Data Collector API or with the custom logs feature of the + MMA agent, or any table against which Custom Fields were created.""" DATA_COLLECTION_RULE_BASED = "DataCollectionRuleBased" + """A table eligible to have data sent into it via any of the means supported by Data Collection + Rules: the Data Collection Endpoint API, ingestion-time transformations, or any other mechanism + provided by Data Collection Rules""" class TableTypeEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Table's creator.""" - #: Standard data collected by Azure Monitor. MICROSOFT = "Microsoft" - #: Custom log table. + """Standard data collected by Azure Monitor.""" CUSTOM_LOG = "CustomLog" - #: Restored data. + """Custom log table.""" RESTORED_LOGS = "RestoredLogs" - #: Data collected by a search job. + """Restored data.""" SEARCH_RESULTS = "SearchResults" + """Data collected by a search job.""" class Type(str, Enum, metaclass=CaseInsensitiveEnumMeta): diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_models_py3.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_models_py3.py index 66ed9d5345a5..eaba73b1ab6d 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_models_py3.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_models_py3.py @@ -29,11 +29,12 @@ class AssociatedWorkspace(_serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :ivar workspace_id: The id of the assigned workspace. + :ivar workspace_id: Associated workspace immutable id. :vartype workspace_id: str - :ivar workspace_name: The name id the assigned workspace. + :ivar workspace_name: Associated workspace resource name. :vartype workspace_name: str - :ivar resource_id: The ResourceId id the assigned workspace. + :ivar resource_id: Associated workspace arm resource id, in the form of: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}. # pylint: disable=line-too-long :vartype resource_id: str :ivar associate_date: The time of workspace association. :vartype associate_date: str @@ -53,7 +54,7 @@ class AssociatedWorkspace(_serialization.Model): "associate_date": {"key": "associateDate", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.workspace_id = None @@ -106,7 +107,7 @@ class AvailableServiceTier(_serialization.Model): "last_sku_update": {"key": "lastSkuUpdate", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.service_tier = None @@ -124,7 +125,7 @@ class Resource(_serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. 
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -145,7 +146,7 @@ class Resource(_serialization.Model): "type": {"key": "type", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -159,7 +160,7 @@ class AzureEntityResource(Resource): Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -184,7 +185,7 @@ class AzureEntityResource(Resource): "etag": {"key": "etag", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.etag = None @@ -219,7 +220,7 @@ class AzureResourceProperties(_serialization.Model): "system_data": {"key": "systemData", "type": "SystemData"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -235,7 +236,7 @@ class CapacityReservationProperties(_serialization.Model): :ivar last_sku_update: The last time Sku was updated. :vartype last_sku_update: str - :ivar min_capacity: Minimum CapacityReservation value in GB. + :ivar min_capacity: Minimum CapacityReservation value in Gigabytes. :vartype min_capacity: int """ @@ -249,7 +250,7 @@ class CapacityReservationProperties(_serialization.Model): "min_capacity": {"key": "minCapacity", "type": "int"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.last_sku_update = None @@ -257,14 +258,15 @@ def __init__(self, **kwargs): class TrackedResource(Resource): - """The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location'. + """The resource model definition for an Azure Resource Manager tracked top level resource which + has 'tags' and a 'location'. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -292,7 +294,7 @@ class TrackedResource(Resource): "location": {"key": "location", "type": "str"}, } - def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kwargs): + def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None: """ :keyword tags: Resource tags. 
:paramtype tags: dict[str, str] @@ -309,10 +311,10 @@ class Cluster(TrackedResource): # pylint: disable=too-many-instance-attributes Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -323,8 +325,8 @@ class Cluster(TrackedResource): # pylint: disable=too-many-instance-attributes :vartype tags: dict[str, str] :ivar location: The geo-location where the resource lives. Required. :vartype location: str - :ivar identity: The identity of the resource. - :vartype identity: ~azure.mgmt.loganalytics.models.Identity + :ivar identity: Resource's identity. + :vartype identity: ~azure.mgmt.loganalytics.models.ManagedServiceIdentity :ivar sku: The sku properties. :vartype sku: ~azure.mgmt.loganalytics.models.ClusterSku :ivar cluster_id: The ID associated with the cluster. @@ -372,7 +374,7 @@ class Cluster(TrackedResource): # pylint: disable=too-many-instance-attributes "type": {"key": "type", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "location": {"key": "location", "type": "str"}, - "identity": {"key": "identity", "type": "Identity"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, "sku": {"key": "sku", "type": "ClusterSku"}, "cluster_id": {"key": "properties.clusterId", "type": "str"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, @@ -394,7 +396,7 @@ def __init__( *, location: str, tags: Optional[Dict[str, str]] = None, - identity: Optional["_models.Identity"] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, sku: Optional["_models.ClusterSku"] = None, is_double_encryption_enabled: Optional[bool] = None, is_availability_zones_enabled: Optional[bool] = None, @@ -402,15 +404,15 @@ def __init__( key_vault_properties: Optional["_models.KeyVaultProperties"] = None, associated_workspaces: Optional[List["_models.AssociatedWorkspace"]] = None, capacity_reservation_properties: Optional["_models.CapacityReservationProperties"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword tags: Resource tags. :paramtype tags: dict[str, str] :keyword location: The geo-location where the resource lives. Required. :paramtype location: str - :keyword identity: The identity of the resource. - :paramtype identity: ~azure.mgmt.loganalytics.models.Identity + :keyword identity: Resource's identity. + :paramtype identity: ~azure.mgmt.loganalytics.models.ManagedServiceIdentity :keyword sku: The sku properties. :paramtype sku: ~azure.mgmt.loganalytics.models.ClusterSku :keyword is_double_encryption_enabled: Configures whether cluster will use double encryption. 
@@ -462,7 +464,9 @@ class ClusterListResult(_serialization.Model): "value": {"key": "value", "type": "[Cluster]"}, } - def __init__(self, *, next_link: Optional[str] = None, value: Optional[List["_models.Cluster"]] = None, **kwargs): + def __init__( + self, *, next_link: Optional[str] = None, value: Optional[List["_models.Cluster"]] = None, **kwargs: Any + ) -> None: """ :keyword next_link: The link used to get the next page of recommendations. :paramtype next_link: str @@ -477,8 +481,8 @@ def __init__(self, *, next_link: Optional[str] = None, value: Optional[List["_mo class ClusterPatch(_serialization.Model): """The top level Log Analytics cluster resource container. - :ivar identity: The identity of the resource. - :vartype identity: ~azure.mgmt.loganalytics.models.Identity + :ivar identity: Resource's identity. + :vartype identity: ~azure.mgmt.loganalytics.models.ManagedServiceIdentity :ivar sku: The sku properties. :vartype sku: ~azure.mgmt.loganalytics.models.ClusterSku :ivar tags: Resource tags. @@ -490,7 +494,7 @@ class ClusterPatch(_serialization.Model): """ _attribute_map = { - "identity": {"key": "identity", "type": "Identity"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, "sku": {"key": "sku", "type": "ClusterSku"}, "tags": {"key": "tags", "type": "{str}"}, "key_vault_properties": {"key": "properties.keyVaultProperties", "type": "KeyVaultProperties"}, @@ -500,16 +504,16 @@ class ClusterPatch(_serialization.Model): def __init__( self, *, - identity: Optional["_models.Identity"] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, sku: Optional["_models.ClusterSku"] = None, tags: Optional[Dict[str, str]] = None, key_vault_properties: Optional["_models.KeyVaultProperties"] = None, billing_type: Optional[Union[str, "_models.BillingType"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword identity: The identity of the resource. - :paramtype identity: ~azure.mgmt.loganalytics.models.Identity + :keyword identity: Resource's identity. + :paramtype identity: ~azure.mgmt.loganalytics.models.ManagedServiceIdentity :keyword sku: The sku properties. :paramtype sku: ~azure.mgmt.loganalytics.models.ClusterSku :keyword tags: Resource tags. @@ -531,9 +535,10 @@ def __init__( class ClusterSku(_serialization.Model): """The cluster sku definition. - :ivar capacity: The capacity value. Known values are: 500, 1000, 2000, and 5000. + :ivar capacity: The capacity reservation level in Gigabytes for this cluster. Known values are: + 100, 200, 300, 400, 500, 1000, 2000, 5000, 10000, 25000, and 50000. :vartype capacity: int or ~azure.mgmt.loganalytics.models.Capacity - :ivar name: The name of the SKU. "CapacityReservation" + :ivar name: The SKU (tier) of a cluster. "CapacityReservation" :vartype name: str or ~azure.mgmt.loganalytics.models.ClusterSkuNameEnum """ @@ -547,12 +552,13 @@ def __init__( *, capacity: Optional[Union[int, "_models.Capacity"]] = None, name: Optional[Union[str, "_models.ClusterSkuNameEnum"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword capacity: The capacity value. Known values are: 500, 1000, 2000, and 5000. + :keyword capacity: The capacity reservation level in Gigabytes for this cluster. Known values + are: 100, 200, 300, 400, 500, 1000, 2000, 5000, 10000, 25000, and 50000. :paramtype capacity: int or ~azure.mgmt.loganalytics.models.Capacity - :keyword name: The name of the SKU. "CapacityReservation" + :keyword name: The SKU (tier) of a cluster. 
"CapacityReservation" :paramtype name: str or ~azure.mgmt.loganalytics.models.ClusterSkuNameEnum """ super().__init__(**kwargs) @@ -568,7 +574,7 @@ class Column(_serialization.Model): :ivar name: Column name. :vartype name: str :ivar type: Column data type. Known values are: "string", "int", "long", "real", "boolean", - "dateTime", "guid", and "dynamic". + "dateTime", "guid", "dynamic", and "int". :vartype type: str or ~azure.mgmt.loganalytics.models.ColumnTypeEnum :ivar data_type_hint: Column data type logical hint. Known values are: "uri", "guid", "armPath", and "ip". @@ -606,13 +612,13 @@ def __init__( data_type_hint: Optional[Union[str, "_models.ColumnDataTypeHintEnum"]] = None, display_name: Optional[str] = None, description: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword name: Column name. :paramtype name: str :keyword type: Column data type. Known values are: "string", "int", "long", "real", "boolean", - "dateTime", "guid", and "dynamic". + "dateTime", "guid", "dynamic", and "int". :paramtype type: str or ~azure.mgmt.loganalytics.models.ColumnTypeEnum :keyword data_type_hint: Column data type logical hint. Known values are: "uri", "guid", "armPath", and "ip". @@ -635,7 +641,7 @@ def __init__( class CoreSummary(_serialization.Model): """The core summary of a search. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar status: The status of a core summary. :vartype status: str @@ -652,7 +658,7 @@ class CoreSummary(_serialization.Model): "number_of_documents": {"key": "numberOfDocuments", "type": "int"}, } - def __init__(self, *, number_of_documents: int, status: Optional[str] = None, **kwargs): + def __init__(self, *, number_of_documents: int, status: Optional[str] = None, **kwargs: Any) -> None: """ :keyword status: The status of a core summary. :paramtype status: str @@ -665,12 +671,13 @@ def __init__(self, *, number_of_documents: int, status: Optional[str] = None, ** class ProxyResource(Resource): - """The resource model definition for a Azure Resource Manager proxy resource. It will not have tags and a location. + """The resource model definition for a Azure Resource Manager proxy resource. It will not have + tags and a location. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -679,22 +686,6 @@ class ProxyResource(Resource): :vartype type: str """ - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - } - - def __init__(self, **kwargs): - """ """ - super().__init__(**kwargs) - class DataExport(ProxyResource): # pylint: disable=too-many-instance-attributes """The top level data export resource container. 
@@ -702,7 +693,7 @@ class DataExport(ProxyResource): # pylint: disable=too-many-instance-attributes Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -761,8 +752,8 @@ def __init__( last_modified_date: Optional[str] = None, resource_id: Optional[str] = None, event_hub_name: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword data_export_id: The data export rule ID. :paramtype data_export_id: str @@ -803,7 +794,7 @@ class DataExportListResult(_serialization.Model): "value": {"key": "value", "type": "[DataExport]"}, } - def __init__(self, *, value: Optional[List["_models.DataExport"]] = None, **kwargs): + def __init__(self, *, value: Optional[List["_models.DataExport"]] = None, **kwargs: Any) -> None: """ :keyword value: List of data export instances within a workspace.. :paramtype value: list[~azure.mgmt.loganalytics.models.DataExport] @@ -817,10 +808,10 @@ class DataSource(ProxyResource): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -873,8 +864,8 @@ def __init__( kind: Union[str, "_models.DataSourceKind"], etag: Optional[str] = None, tags: Optional[Dict[str, str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword properties: The data source properties in raw json format, each kind of data source have it's own schema. Required. @@ -924,7 +915,7 @@ class DataSourceFilter(_serialization.Model): "kind": {"key": "kind", "type": "str"}, } - def __init__(self, *, kind: Optional[Union[str, "_models.DataSourceKind"]] = None, **kwargs): + def __init__(self, *, kind: Optional[Union[str, "_models.DataSourceKind"]] = None, **kwargs: Any) -> None: """ :keyword kind: The kind of the DataSource. Known values are: "WindowsEvent", "WindowsPerformanceCounter", "IISLogs", "LinuxSyslog", "LinuxSyslogCollection", @@ -958,8 +949,8 @@ class DataSourceListResult(_serialization.Model): } def __init__( - self, *, value: Optional[List["_models.DataSource"]] = None, next_link: Optional[str] = None, **kwargs - ): + self, *, value: Optional[List["_models.DataSource"]] = None, next_link: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword value: A list of datasources. 
:paramtype value: list[~azure.mgmt.loganalytics.models.DataSource] @@ -992,7 +983,7 @@ class ErrorAdditionalInfo(_serialization.Model): "info": {"key": "info", "type": "object"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.type = None @@ -1032,7 +1023,7 @@ class ErrorDetail(_serialization.Model): "additional_info": {"key": "additionalInfo", "type": "[ErrorAdditionalInfo]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.code = None @@ -1043,7 +1034,8 @@ def __init__(self, **kwargs): class ErrorResponse(_serialization.Model): - """Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.). + """Common error response for all Azure Resource Manager APIs to return error details for failed + operations. (This also follows the OData error response format.). :ivar error: The error object. :vartype error: ~azure.mgmt.loganalytics.models.ErrorDetail @@ -1053,7 +1045,7 @@ class ErrorResponse(_serialization.Model): "error": {"key": "error", "type": "ErrorDetail"}, } - def __init__(self, *, error: Optional["_models.ErrorDetail"] = None, **kwargs): + def __init__(self, *, error: Optional["_models.ErrorDetail"] = None, **kwargs: Any) -> None: """ :keyword error: The error object. :paramtype error: ~azure.mgmt.loganalytics.models.ErrorDetail @@ -1067,7 +1059,7 @@ class Identity(_serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar principal_id: The principal ID of resource identity. :vartype principal_id: str @@ -1078,7 +1070,7 @@ class Identity(_serialization.Model): :vartype type: str or ~azure.mgmt.loganalytics.models.IdentityType :ivar user_assigned_identities: The list of user identities associated with the resource. The user identity dictionary key references will be ARM resource ids in the form: - '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. # pylint: disable=line-too-long :vartype user_assigned_identities: dict[str, ~azure.mgmt.loganalytics.models.UserIdentityProperties] """ @@ -1101,15 +1093,15 @@ def __init__( *, type: Union[str, "_models.IdentityType"], user_assigned_identities: Optional[Dict[str, "_models.UserIdentityProperties"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword type: Type of managed service identity. Required. Known values are: "user", "application", "managedIdentity", "key", "SystemAssigned", "UserAssigned", and "None". :paramtype type: str or ~azure.mgmt.loganalytics.models.IdentityType :keyword user_assigned_identities: The list of user identities associated with the resource. The user identity dictionary key references will be ARM resource ids in the form: - '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. 
# pylint: disable=line-too-long :paramtype user_assigned_identities: dict[str, ~azure.mgmt.loganalytics.models.UserIdentityProperties] """ @@ -1143,8 +1135,8 @@ def __init__( name: Optional[str] = None, enabled: Optional[bool] = None, display_name: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword name: The name of the intelligence pack. :paramtype name: str @@ -1187,8 +1179,8 @@ def __init__( key_name: Optional[str] = None, key_version: Optional[str] = None, key_rsa_size: Optional[int] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword key_vault_uri: The Key Vault uri which holds they key associated with the Log Analytics cluster. @@ -1213,7 +1205,7 @@ class LinkedService(ProxyResource): Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -1256,8 +1248,8 @@ def __init__( resource_id: Optional[str] = None, write_access_resource_id: Optional[str] = None, provisioning_state: Optional[Union[str, "_models.LinkedServiceEntityStatus"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword tags: Resource tags. :paramtype tags: dict[str, str] @@ -1289,7 +1281,7 @@ class LinkedServiceListResult(_serialization.Model): "value": {"key": "value", "type": "[LinkedService]"}, } - def __init__(self, *, value: Optional[List["_models.LinkedService"]] = None, **kwargs): + def __init__(self, *, value: Optional[List["_models.LinkedService"]] = None, **kwargs: Any) -> None: """ :keyword value: The list of linked service instances. :paramtype value: list[~azure.mgmt.loganalytics.models.LinkedService] @@ -1309,7 +1301,7 @@ class LinkedStorageAccountsListResult(_serialization.Model): "value": {"key": "value", "type": "[LinkedStorageAccountsResource]"}, } - def __init__(self, *, value: Optional[List["_models.LinkedStorageAccountsResource"]] = None, **kwargs): + def __init__(self, *, value: Optional[List["_models.LinkedStorageAccountsResource"]] = None, **kwargs: Any) -> None: """ :keyword value: A list of linked storage accounts instances. :paramtype value: list[~azure.mgmt.loganalytics.models.LinkedStorageAccountsResource] @@ -1324,7 +1316,7 @@ class LinkedStorageAccountsResource(ProxyResource): Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long :vartype id: str :ivar name: The name of the resource. 
:vartype name: str @@ -1353,7 +1345,7 @@ class LinkedStorageAccountsResource(ProxyResource): "storage_account_ids": {"key": "properties.storageAccountIds", "type": "[str]"}, } - def __init__(self, *, storage_account_ids: Optional[List[str]] = None, **kwargs): + def __init__(self, *, storage_account_ids: Optional[List[str]] = None, **kwargs: Any) -> None: """ :keyword storage_account_ids: Linked storage accounts resources ids. :paramtype storage_account_ids: list[str] @@ -1368,7 +1360,7 @@ class QueryPacksResource(_serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Azure resource Id. :vartype id: str @@ -1397,7 +1389,7 @@ class QueryPacksResource(_serialization.Model): "tags": {"key": "tags", "type": "{str}"}, } - def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kwargs): + def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None: """ :keyword location: Resource location. Required. :paramtype location: str @@ -1417,7 +1409,7 @@ class LogAnalyticsQueryPack(QueryPacksResource): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Azure resource Id. :vartype id: str @@ -1464,7 +1456,7 @@ class LogAnalyticsQueryPack(QueryPacksResource): "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } - def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kwargs): + def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None: """ :keyword location: Resource location. Required. :paramtype location: str @@ -1481,7 +1473,7 @@ def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kw class LogAnalyticsQueryPackListResult(_serialization.Model): """Describes the list of Log Analytics QueryPack resources. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar value: List of Log Analytics QueryPack definitions. Required. :vartype value: list[~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack] @@ -1499,7 +1491,9 @@ class LogAnalyticsQueryPackListResult(_serialization.Model): "next_link": {"key": "nextLink", "type": "str"}, } - def __init__(self, *, value: List["_models.LogAnalyticsQueryPack"], next_link: Optional[str] = None, **kwargs): + def __init__( + self, *, value: List["_models.LogAnalyticsQueryPack"], next_link: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword value: List of Log Analytics QueryPack definitions. Required. :paramtype value: list[~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack] @@ -1584,8 +1578,8 @@ def __init__( related: Optional["_models.LogAnalyticsQueryPackQueryPropertiesRelated"] = None, tags: Optional[Dict[str, List[str]]] = None, properties: Optional[JSON] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword display_name: Unique display name for your query within the Query Pack. :paramtype display_name: str @@ -1616,7 +1610,7 @@ def __init__( class LogAnalyticsQueryPackQueryListResult(_serialization.Model): """Describes the list of Log Analytics QueryPack-Query resources. 
- All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar value: List of Log Analytics QueryPack Query definitions. Required. :vartype value: list[~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery] @@ -1634,7 +1628,9 @@ class LogAnalyticsQueryPackQueryListResult(_serialization.Model): "next_link": {"key": "nextLink", "type": "str"}, } - def __init__(self, *, value: List["_models.LogAnalyticsQueryPackQuery"], next_link: Optional[str] = None, **kwargs): + def __init__( + self, *, value: List["_models.LogAnalyticsQueryPackQuery"], next_link: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword value: List of Log Analytics QueryPack Query definitions. Required. :paramtype value: list[~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery] @@ -1647,7 +1643,7 @@ def __init__(self, *, value: List["_models.LogAnalyticsQueryPackQuery"], next_li self.next_link = next_link -class LogAnalyticsQueryPackQueryPropertiesRelated(_serialization.Model): +class LogAnalyticsQueryPackQueryPropertiesRelated(_serialization.Model): # pylint: disable=name-too-long """The related metadata items for the function. :ivar categories: The related categories for the function. @@ -1670,8 +1666,8 @@ def __init__( categories: Optional[List[str]] = None, resource_types: Optional[List[str]] = None, solutions: Optional[List[str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword categories: The related categories for the function. :paramtype categories: list[str] @@ -1686,7 +1682,7 @@ def __init__( self.solutions = solutions -class LogAnalyticsQueryPackQuerySearchProperties(_serialization.Model): +class LogAnalyticsQueryPackQuerySearchProperties(_serialization.Model): # pylint: disable=name-too-long """Properties that define an Log Analytics QueryPack-Query search properties. :ivar related: The related metadata items for the function. @@ -1706,8 +1702,8 @@ def __init__( *, related: Optional["_models.LogAnalyticsQueryPackQuerySearchPropertiesRelated"] = None, tags: Optional[Dict[str, List[str]]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword related: The related metadata items for the function. :paramtype related: @@ -1720,7 +1716,7 @@ def __init__( self.tags = tags -class LogAnalyticsQueryPackQuerySearchPropertiesRelated(_serialization.Model): +class LogAnalyticsQueryPackQuerySearchPropertiesRelated(_serialization.Model): # pylint: disable=name-too-long """The related metadata items for the function. :ivar categories: The related categories for the function. @@ -1743,8 +1739,8 @@ def __init__( categories: Optional[List[str]] = None, resource_types: Optional[List[str]] = None, solutions: Optional[List[str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword categories: The related categories for the function. :paramtype categories: list[str] @@ -1759,6 +1755,70 @@ def __init__( self.solutions = solutions +class ManagedServiceIdentity(_serialization.Model): + """Managed service identity (system assigned and/or user assigned identities). + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to server. + + :ivar principal_id: The service principal ID of the system assigned identity. This property + will only be provided for a system assigned identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of the system assigned identity. 
This property will only be + provided for a system assigned identity. + :vartype tenant_id: str + :ivar type: Type of managed service identity (where both SystemAssigned and UserAssigned types + are allowed). Required. Known values are: "None", "SystemAssigned", "UserAssigned", and + "SystemAssigned,UserAssigned". + :vartype type: str or ~azure.mgmt.loganalytics.models.ManagedServiceIdentityType + :ivar user_assigned_identities: The set of user assigned identities associated with the + resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. # pylint: disable=line-too-long + The dictionary values can be empty objects ({}) in requests. + :vartype user_assigned_identities: dict[str, + ~azure.mgmt.loganalytics.models.UserAssignedIdentity] + """ + + _validation = { + "principal_id": {"readonly": True}, + "tenant_id": {"readonly": True}, + "type": {"required": True}, + } + + _attribute_map = { + "principal_id": {"key": "principalId", "type": "str"}, + "tenant_id": {"key": "tenantId", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "user_assigned_identities": {"key": "userAssignedIdentities", "type": "{UserAssignedIdentity}"}, + } + + def __init__( + self, + *, + type: Union[str, "_models.ManagedServiceIdentityType"], + user_assigned_identities: Optional[Dict[str, "_models.UserAssignedIdentity"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword type: Type of managed service identity (where both SystemAssigned and UserAssigned + types are allowed). Required. Known values are: "None", "SystemAssigned", "UserAssigned", and + "SystemAssigned,UserAssigned". + :paramtype type: str or ~azure.mgmt.loganalytics.models.ManagedServiceIdentityType + :keyword user_assigned_identities: The set of user assigned identities associated with the + resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. # pylint: disable=line-too-long + The dictionary values can be empty objects ({}) in requests. + :paramtype user_assigned_identities: dict[str, + ~azure.mgmt.loganalytics.models.UserAssignedIdentity] + """ + super().__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.type = type + self.user_assigned_identities = user_assigned_identities + + class ManagementGroup(_serialization.Model): """A management group that is connected to a workspace. @@ -1802,8 +1862,8 @@ def __init__( data_received: Optional[datetime.datetime] = None, version: Optional[str] = None, sku: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword server_count: The number of servers connected to the management group. :paramtype server_count: int @@ -1847,7 +1907,7 @@ class MetricName(_serialization.Model): "localized_value": {"key": "localizedValue", "type": "str"}, } - def __init__(self, *, value: Optional[str] = None, localized_value: Optional[str] = None, **kwargs): + def __init__(self, *, value: Optional[str] = None, localized_value: Optional[str] = None, **kwargs: Any) -> None: """ :keyword value: The system name of the metric. 
:paramtype value: str @@ -1873,7 +1933,9 @@ class Operation(_serialization.Model): "display": {"key": "display", "type": "OperationDisplay"}, } - def __init__(self, *, name: Optional[str] = None, display: Optional["_models.OperationDisplay"] = None, **kwargs): + def __init__( + self, *, name: Optional[str] = None, display: Optional["_models.OperationDisplay"] = None, **kwargs: Any + ) -> None: """ :keyword name: Operation name: {provider}/{resource}/{operation}. :paramtype name: str @@ -1912,8 +1974,8 @@ def __init__( resource: Optional[str] = None, operation: Optional[str] = None, description: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword provider: Service provider: Microsoft OperationsManagement. :paramtype provider: str @@ -1952,7 +2014,7 @@ class OperationListResult(_serialization.Model): "next_link": {"key": "nextLink", "type": "str"}, } - def __init__(self, *, value: Optional[List["_models.Operation"]] = None, **kwargs): + def __init__(self, *, value: Optional[List["_models.Operation"]] = None, **kwargs: Any) -> None: """ :keyword value: List of solution operations supported by the OperationsManagement resource provider. @@ -1998,8 +2060,8 @@ def __init__( end_time: Optional[str] = None, status: Optional[str] = None, error: Optional["_models.ErrorResponse"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword id: The operation Id. :paramtype id: str @@ -2037,7 +2099,7 @@ class PrivateLinkScopedResource(_serialization.Model): "scope_id": {"key": "scopeId", "type": "str"}, } - def __init__(self, *, resource_id: Optional[str] = None, scope_id: Optional[str] = None, **kwargs): + def __init__(self, *, resource_id: Optional[str] = None, scope_id: Optional[str] = None, **kwargs: Any) -> None: """ :keyword resource_id: The full resource Id of the private link scope resource. :paramtype resource_id: str @@ -2081,8 +2143,8 @@ def __init__( start_restore_time: Optional[datetime.datetime] = None, end_restore_time: Optional[datetime.datetime] = None, source_table: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword start_restore_time: The timestamp to start the restore from (UTC). :paramtype start_restore_time: ~datetime.datetime @@ -2123,7 +2185,7 @@ class ResultStatistics(_serialization.Model): "scanned_gb": {"key": "scannedGb", "type": "float"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.progress = None @@ -2136,10 +2198,10 @@ class SavedSearch(ProxyResource): # pylint: disable=too-many-instance-attribute Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -2204,8 +2266,8 @@ def __init__( function_parameters: Optional[str] = None, version: Optional[int] = None, tags: Optional[List["_models.Tag"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword etag: The ETag of the saved search. 
To override an existing saved search, use "*" or specify the current Etag. @@ -2252,7 +2314,7 @@ class SavedSearchesListResult(_serialization.Model): "value": {"key": "value", "type": "[SavedSearch]"}, } - def __init__(self, *, value: Optional[List["_models.SavedSearch"]] = None, **kwargs): + def __init__(self, *, value: Optional[List["_models.SavedSearch"]] = None, **kwargs: Any) -> None: """ :keyword value: The array of result values. :paramtype value: list[~azure.mgmt.loganalytics.models.SavedSearch] @@ -2324,8 +2386,8 @@ def __init__( display_name: Optional[str] = None, description: Optional[str] = None, columns: Optional[List["_models.Column"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword name: Table name. :paramtype name: str @@ -2369,8 +2431,8 @@ def __init__( *, metadata: Optional["_models.SearchMetadata"] = None, value: Optional[List["_models.SearchSchemaValue"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword metadata: The metadata from search results. :paramtype metadata: ~azure.mgmt.loganalytics.models.SearchMetadata @@ -2461,8 +2523,8 @@ def __init__( sum: Optional[int] = None, max: Optional[int] = None, # pylint: disable=redefined-builtin schema: Optional["_models.SearchMetadataSchema"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword search_id: The request id of the search. :paramtype search_id: str @@ -2533,7 +2595,7 @@ class SearchMetadataSchema(_serialization.Model): "version": {"key": "version", "type": "int"}, } - def __init__(self, *, name: Optional[str] = None, version: Optional[int] = None, **kwargs): + def __init__(self, *, name: Optional[str] = None, version: Optional[int] = None, **kwargs: Any) -> None: """ :keyword name: The name of the metadata schema. :paramtype name: str @@ -2589,8 +2651,8 @@ def __init__( limit: Optional[int] = None, start_search_time: Optional[datetime.datetime] = None, end_search_time: Optional[datetime.datetime] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword query: Search job query. :paramtype query: str @@ -2616,7 +2678,7 @@ def __init__( class SearchSchemaValue(_serialization.Model): """Value object for schema results. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar name: The name of the schema. :vartype name: str @@ -2660,8 +2722,8 @@ def __init__( display_name: Optional[str] = None, type: Optional[str] = None, owner_type: Optional[List[str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword name: The name of the schema. :paramtype name: str @@ -2703,8 +2765,8 @@ class SearchSort(_serialization.Model): } def __init__( - self, *, name: Optional[str] = None, order: Optional[Union[str, "_models.SearchSortEnum"]] = None, **kwargs - ): + self, *, name: Optional[str] = None, order: Optional[Union[str, "_models.SearchSortEnum"]] = None, **kwargs: Any + ) -> None: """ :keyword name: The name of the field the search query is sorted on. :paramtype name: str @@ -2731,8 +2793,8 @@ class SharedKeys(_serialization.Model): } def __init__( - self, *, primary_shared_key: Optional[str] = None, secondary_shared_key: Optional[str] = None, **kwargs - ): + self, *, primary_shared_key: Optional[str] = None, secondary_shared_key: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword primary_shared_key: The primary shared key of a workspace. 
:paramtype primary_shared_key: str @@ -2747,7 +2809,7 @@ def __init__( class StorageAccount(_serialization.Model): """Describes a storage account connection. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: The Azure Resource Manager ID of the storage account resource. Required. :vartype id: str @@ -2765,7 +2827,7 @@ class StorageAccount(_serialization.Model): "key": {"key": "key", "type": "str"}, } - def __init__(self, *, id: str, key: str, **kwargs): # pylint: disable=redefined-builtin + def __init__(self, *, id: str, key: str, **kwargs: Any) -> None: # pylint: disable=redefined-builtin """ :keyword id: The Azure Resource Manager ID of the storage account resource. Required. :paramtype id: str @@ -2783,7 +2845,7 @@ class StorageInsight(ProxyResource): Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -2831,8 +2893,8 @@ def __init__( containers: Optional[List[str]] = None, tables: Optional[List[str]] = None, storage_account: Optional["_models.StorageAccount"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword e_tag: The ETag of the storage insight. :paramtype e_tag: str @@ -2869,8 +2931,12 @@ class StorageInsightListResult(_serialization.Model): } def __init__( - self, *, value: Optional[List["_models.StorageInsight"]] = None, odata_next_link: Optional[str] = None, **kwargs - ): + self, + *, + value: Optional[List["_models.StorageInsight"]] = None, + odata_next_link: Optional[str] = None, + **kwargs: Any + ) -> None: """ :keyword value: A list of storage insight items. :paramtype value: list[~azure.mgmt.loganalytics.models.StorageInsight] @@ -2885,7 +2951,7 @@ def __init__( class StorageInsightStatus(_serialization.Model): """The status of the storage insight. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar state: The state of the storage insight connection to the workspace. Required. Known values are: "OK" and "ERROR". @@ -2904,8 +2970,8 @@ class StorageInsightStatus(_serialization.Model): } def __init__( - self, *, state: Union[str, "_models.StorageInsightState"], description: Optional[str] = None, **kwargs - ): + self, *, state: Union[str, "_models.StorageInsightState"], description: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword state: The state of the storage insight connection to the workspace. Required. Known values are: "OK" and "ERROR". @@ -2956,8 +3022,8 @@ def __init__( last_modified_by: Optional[str] = None, last_modified_by_type: Optional[Union[str, "_models.IdentityType"]] = None, last_modified_at: Optional[datetime.datetime] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword created_by: An identifier for the identity that created the resource. 
:paramtype created_by: str @@ -3021,8 +3087,8 @@ def __init__( last_modified_by: Optional[str] = None, last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, last_modified_at: Optional[datetime.datetime] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword created_by: The identity that created the resource. :paramtype created_by: str @@ -3054,7 +3120,7 @@ class Table(ProxyResource): # pylint: disable=too-many-instance-attributes Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -3066,7 +3132,7 @@ class Table(ProxyResource): # pylint: disable=too-many-instance-attributes :ivar retention_in_days: The table retention in days, between 4 and 730. Setting this property to -1 will default to the workspace retention. :vartype retention_in_days: int - :ivar total_retention_in_days: The table total retention in days, between 4 and 2555. Setting + :ivar total_retention_in_days: The table total retention in days, between 4 and 4383. Setting this property to -1 will default to table retention. :vartype total_retention_in_days: int :ivar archive_retention_in_days: The table data archive retention in days. Calculated as @@ -3087,7 +3153,8 @@ class Table(ProxyResource): # pylint: disable=too-many-instance-attributes :vartype schema: ~azure.mgmt.loganalytics.models.Schema :ivar provisioning_state: Table's current provisioning state. If set to 'updating', indicates a resource lock due to ongoing operation, forbidding any update to the table until the ongoing - operation is concluded. Known values are: "Updating", "InProgress", and "Succeeded". + operation is concluded. Known values are: "Updating", "InProgress", "Succeeded", and + "Deleting". :vartype provisioning_state: str or ~azure.mgmt.loganalytics.models.ProvisioningStateEnum :ivar retention_in_days_as_default: True - Value originates from workspace retention in days, False - Customer specific. @@ -3103,7 +3170,7 @@ class Table(ProxyResource): # pylint: disable=too-many-instance-attributes "type": {"readonly": True}, "system_data": {"readonly": True}, "retention_in_days": {"maximum": 730, "minimum": 4}, - "total_retention_in_days": {"maximum": 2555, "minimum": 4}, + "total_retention_in_days": {"maximum": 4383, "minimum": 4}, "archive_retention_in_days": {"readonly": True}, "result_statistics": {"readonly": True}, "last_plan_modified_date": {"readonly": True}, @@ -3140,13 +3207,13 @@ def __init__( restored_logs: Optional["_models.RestoredLogs"] = None, plan: Optional[Union[str, "_models.TablePlanEnum"]] = None, schema: Optional["_models.Schema"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword retention_in_days: The table retention in days, between 4 and 730. Setting this property to -1 will default to the workspace retention. :paramtype retention_in_days: int - :keyword total_retention_in_days: The table total retention in days, between 4 and 2555. + :keyword total_retention_in_days: The table total retention in days, between 4 and 4383. Setting this property to -1 will default to table retention. 
:paramtype total_retention_in_days: int :keyword search_results: Parameters of the search job that initiated this table. @@ -3186,7 +3253,7 @@ class TablesListResult(_serialization.Model): "value": {"key": "value", "type": "[Table]"}, } - def __init__(self, *, value: Optional[List["_models.Table"]] = None, **kwargs): + def __init__(self, *, value: Optional[List["_models.Table"]] = None, **kwargs: Any) -> None: """ :keyword value: A list of data tables. :paramtype value: list[~azure.mgmt.loganalytics.models.Table] @@ -3198,7 +3265,7 @@ def __init__(self, *, value: Optional[List["_models.Table"]] = None, **kwargs): class Tag(_serialization.Model): """A tag of a saved search. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar name: The tag name. Required. :vartype name: str @@ -3216,7 +3283,7 @@ class Tag(_serialization.Model): "value": {"key": "value", "type": "str"}, } - def __init__(self, *, name: str, value: str, **kwargs): + def __init__(self, *, name: str, value: str, **kwargs: Any) -> None: """ :keyword name: The tag name. Required. :paramtype name: str @@ -3229,7 +3296,8 @@ def __init__(self, *, name: str, value: str, **kwargs): class TagsResource(_serialization.Model): - """A container holding only the Tags for a resource, allowing the user to update the tags on a QueryPack instance. + """A container holding only the Tags for a resource, allowing the user to update the tags on a + QueryPack instance. :ivar tags: Resource tags. :vartype tags: dict[str, str] @@ -3239,7 +3307,7 @@ class TagsResource(_serialization.Model): "tags": {"key": "tags", "type": "{str}"}, } - def __init__(self, *, tags: Optional[Dict[str, str]] = None, **kwargs): + def __init__(self, *, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None: """ :keyword tags: Resource tags. :paramtype tags: dict[str, str] @@ -3283,8 +3351,8 @@ def __init__( limit: Optional[float] = None, next_reset_time: Optional[datetime.datetime] = None, quota_period: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword name: The name of the metric. :paramtype name: ~azure.mgmt.loganalytics.models.MetricName @@ -3309,6 +3377,34 @@ def __init__( self.quota_period = quota_period +class UserAssignedIdentity(_serialization.Model): + """User assigned identity properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: The principal ID of the assigned identity. + :vartype principal_id: str + :ivar client_id: The client ID of the assigned identity. + :vartype client_id: str + """ + + _validation = { + "principal_id": {"readonly": True}, + "client_id": {"readonly": True}, + } + + _attribute_map = { + "principal_id": {"key": "principalId", "type": "str"}, + "client_id": {"key": "clientId", "type": "str"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.principal_id = None + self.client_id = None + + class UserIdentityProperties(_serialization.Model): """User assigned identity properties. 
@@ -3330,7 +3426,7 @@ class UserIdentityProperties(_serialization.Model): "client_id": {"key": "clientId", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.principal_id = None @@ -3342,10 +3438,10 @@ class Workspace(TrackedResource): # pylint: disable=too-many-instance-attribute Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -3398,7 +3494,7 @@ class Workspace(TrackedResource): # pylint: disable=too-many-instance-attribute :vartype features: ~azure.mgmt.loganalytics.models.WorkspaceFeatures :ivar default_data_collection_rule_resource_id: The resource ID of the default Data Collection Rule to use for this workspace. Expected format is - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/dataCollectionRules/{dcrName}. + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/dataCollectionRules/{dcrName}. # pylint: disable=line-too-long :vartype default_data_collection_rule_resource_id: str """ @@ -3460,8 +3556,8 @@ def __init__( force_cmk_for_query: Optional[bool] = None, features: Optional["_models.WorkspaceFeatures"] = None, default_data_collection_rule_resource_id: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword tags: Resource tags. :paramtype tags: dict[str, str] @@ -3493,7 +3589,7 @@ def __init__( :paramtype features: ~azure.mgmt.loganalytics.models.WorkspaceFeatures :keyword default_data_collection_rule_resource_id: The resource ID of the default Data Collection Rule to use for this workspace. Expected format is - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/dataCollectionRules/{dcrName}. + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/dataCollectionRules/{dcrName}. # pylint: disable=line-too-long :paramtype default_data_collection_rule_resource_id: str """ super().__init__(tags=tags, location=location, **kwargs) @@ -3541,7 +3637,7 @@ class WorkspaceCapping(_serialization.Model): "data_ingestion_status": {"key": "dataIngestionStatus", "type": "str"}, } - def __init__(self, *, daily_quota_gb: Optional[float] = None, **kwargs): + def __init__(self, *, daily_quota_gb: Optional[float] = None, **kwargs: Any) -> None: """ :keyword daily_quota_gb: The workspace daily quota for ingestion. :paramtype daily_quota_gb: float @@ -3593,8 +3689,8 @@ def __init__( enable_log_access_using_only_resource_permissions: Optional[bool] = None, cluster_resource_id: Optional[str] = None, disable_local_auth: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
@@ -3632,7 +3728,7 @@ class WorkspaceListManagementGroupsResult(_serialization.Model): "value": {"key": "value", "type": "[ManagementGroup]"}, } - def __init__(self, *, value: Optional[List["_models.ManagementGroup"]] = None, **kwargs): + def __init__(self, *, value: Optional[List["_models.ManagementGroup"]] = None, **kwargs: Any) -> None: """ :keyword value: Gets or sets a list of management groups attached to the workspace. :paramtype value: list[~azure.mgmt.loganalytics.models.ManagementGroup] @@ -3652,7 +3748,7 @@ class WorkspaceListResult(_serialization.Model): "value": {"key": "value", "type": "[Workspace]"}, } - def __init__(self, *, value: Optional[List["_models.Workspace"]] = None, **kwargs): + def __init__(self, *, value: Optional[List["_models.Workspace"]] = None, **kwargs: Any) -> None: """ :keyword value: A list of workspaces. :paramtype value: list[~azure.mgmt.loganalytics.models.Workspace] @@ -3672,7 +3768,7 @@ class WorkspaceListUsagesResult(_serialization.Model): "value": {"key": "value", "type": "[UsageMetric]"}, } - def __init__(self, *, value: Optional[List["_models.UsageMetric"]] = None, **kwargs): + def __init__(self, *, value: Optional[List["_models.UsageMetric"]] = None, **kwargs: Any) -> None: """ :keyword value: Gets or sets a list of usage metrics for a workspace. :paramtype value: list[~azure.mgmt.loganalytics.models.UsageMetric] @@ -3687,7 +3783,7 @@ class WorkspacePatch(AzureEntityResource): # pylint: disable=too-many-instance- Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -3736,7 +3832,7 @@ class WorkspacePatch(AzureEntityResource): # pylint: disable=too-many-instance- :vartype features: ~azure.mgmt.loganalytics.models.WorkspaceFeatures :ivar default_data_collection_rule_resource_id: The resource ID of the default Data Collection Rule to use for this workspace. Expected format is - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/dataCollectionRules/{dcrName}. + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/dataCollectionRules/{dcrName}. # pylint: disable=line-too-long :vartype default_data_collection_rule_resource_id: str """ @@ -3793,8 +3889,8 @@ def __init__( force_cmk_for_query: Optional[bool] = None, features: Optional["_models.WorkspaceFeatures"] = None, default_data_collection_rule_resource_id: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword identity: The identity of the resource. :paramtype identity: ~azure.mgmt.loganalytics.models.Identity @@ -3822,7 +3918,7 @@ def __init__( :paramtype features: ~azure.mgmt.loganalytics.models.WorkspaceFeatures :keyword default_data_collection_rule_resource_id: The resource ID of the default Data Collection Rule to use for this workspace. Expected format is - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/dataCollectionRules/{dcrName}. 
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/dataCollectionRules/{dcrName}. # pylint: disable=line-too-long :paramtype default_data_collection_rule_resource_id: str """ super().__init__(**kwargs) @@ -3846,7 +3942,7 @@ def __init__( class WorkspacePurgeBody(_serialization.Model): """Describes the body of a purge request for an App Insights Workspace. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar table: Table from which to purge data. Required. :vartype table: str @@ -3865,7 +3961,7 @@ class WorkspacePurgeBody(_serialization.Model): "filters": {"key": "filters", "type": "[WorkspacePurgeBodyFilters]"}, } - def __init__(self, *, table: str, filters: List["_models.WorkspacePurgeBodyFilters"], **kwargs): + def __init__(self, *, table: str, filters: List["_models.WorkspacePurgeBodyFilters"], **kwargs: Any) -> None: """ :keyword table: Table from which to purge data. Required. :paramtype table: str @@ -3909,8 +4005,8 @@ def __init__( operator: Optional[str] = None, value: Optional[Any] = None, key: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword column: The column of the table over which the given query should run. :paramtype column: str @@ -3935,7 +4031,7 @@ def __init__( class WorkspacePurgeResponse(_serialization.Model): """Response containing operationId for a specific purge action. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar operation_id: Id to use when querying for status for a particular purge operation. Required. @@ -3950,7 +4046,7 @@ class WorkspacePurgeResponse(_serialization.Model): "operation_id": {"key": "operationId", "type": "str"}, } - def __init__(self, *, operation_id: str, **kwargs): + def __init__(self, *, operation_id: str, **kwargs: Any) -> None: """ :keyword operation_id: Id to use when querying for status for a particular purge operation. Required. @@ -3963,7 +4059,7 @@ def __init__(self, *, operation_id: str, **kwargs): class WorkspacePurgeStatusResponse(_serialization.Model): """Response containing status for a specific purge operation. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar status: Status of the operation represented by the requested Id. Required. Known values are: "pending" and "completed". @@ -3978,7 +4074,7 @@ class WorkspacePurgeStatusResponse(_serialization.Model): "status": {"key": "status", "type": "str"}, } - def __init__(self, *, status: Union[str, "_models.PurgeState"], **kwargs): + def __init__(self, *, status: Union[str, "_models.PurgeState"], **kwargs: Any) -> None: """ :keyword status: Status of the operation represented by the requested Id. Required. Known values are: "pending" and "completed". @@ -3993,7 +4089,7 @@ class WorkspaceSku(_serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar name: The name of the SKU. Required. Known values are: "Free", "Standard", "Premium", "PerNode", "PerGB2018", "Standalone", "CapacityReservation", and "LACluster". 
@@ -4023,8 +4119,8 @@ def __init__( *, name: Union[str, "_models.WorkspaceSkuNameEnum"], capacity_reservation_level: Optional[Union[int, "_models.CapacityReservationLevel"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword name: The name of the SKU. Required. Known values are: "Free", "Standard", "Premium", "PerNode", "PerGB2018", "Standalone", "CapacityReservation", and "LACluster". diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_available_service_tiers_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_available_service_tiers_operations.py index ec04ed6e5ef4..f5a14985d4b7 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_available_service_tiers_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_available_service_tiers_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, List, Optional, TypeVar +from typing import Any, Callable, Dict, List, Optional, Type, TypeVar from azure.core.exceptions import ( ClientAuthenticationError, @@ -18,20 +18,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -45,7 +43,7 @@ def build_list_by_workspace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -68,7 +66,7 @@ def build_list_by_workspace_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -109,12 +107,11 @@ def list_by_workspace( :type resource_group_name: str :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: list of AvailableServiceTier or the result of cls(response) :rtype: list[~azure.mgmt.loganalytics.models.AvailableServiceTier] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -125,23 +122,22 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[List[_models.AvailableServiceTier]] = kwargs.pop("cls", None) - request = build_list_by_workspace_request( + _request = build_list_by_workspace_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_workspace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -150,13 +146,9 @@ def list_by_workspace( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("[AvailableServiceTier]", pipeline_response) + deserialized = self._deserialize("[AvailableServiceTier]", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - list_by_workspace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/availableServiceTiers" - } + return deserialized # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_clusters_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_clusters_operations.py index f2318049dc4e..0bdd628d0595 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_clusters_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_clusters_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -15,13 +16,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -29,12 +31,11 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -46,7 +47,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -61,7 +62,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -76,7 +77,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -87,7 +88,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -104,7 +105,7 @@ def 
build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -128,7 +129,7 @@ def build_create_or_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -147,7 +148,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -163,7 +164,7 @@ def build_delete_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -178,7 +179,7 @@ def build_get_request(resource_group_name: str, cluster_name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -194,7 +195,7 @@ def build_get_request(resource_group_name: str, cluster_name: str, subscription_ "clusterName": _SERIALIZER.url("cluster_name", cluster_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -211,7 +212,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -228,7 +229,7 @@ def build_update_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -267,7 +268,6 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> 
Ite :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Cluster or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.loganalytics.models.Cluster] :raises ~azure.core.exceptions.HttpResponseError: @@ -275,10 +275,10 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[_models.ClusterListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -289,23 +289,20 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite def prepare_request(next_link=None): if not next_link: - request = build_list_by_resource_group_request( + _request = build_list_by_resource_group_request( resource_group_name=resource_group_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_resource_group.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("ClusterListResult", pipeline_response) @@ -315,10 +312,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -331,15 +329,10 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list_by_resource_group.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters" - } - @distributed_trace def list(self, **kwargs: Any) -> Iterable["_models.Cluster"]: """Gets the Log Analytics clusters in a subscription. 
- :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Cluster or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.loganalytics.models.Cluster] :raises ~azure.core.exceptions.HttpResponseError: @@ -347,10 +340,10 @@ def list(self, **kwargs: Any) -> Iterable["_models.Cluster"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[_models.ClusterListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -361,22 +354,19 @@ def list(self, **kwargs: Any) -> Iterable["_models.Cluster"]: def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("ClusterListResult", pipeline_response) @@ -386,10 +376,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -402,12 +393,10 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.OperationalInsights/clusters"} - def _create_or_update_initial( - self, resource_group_name: str, cluster_name: str, parameters: Union[_models.Cluster, IO], **kwargs: Any - ) -> Optional[_models.Cluster]: - error_map = { + self, resource_group_name: str, cluster_name: str, parameters: Union[_models.Cluster, IO[bytes]], **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -418,19 +407,19 @@ def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.Cluster]] = kwargs.pop("cls", None) + cls: 
ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "Cluster") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, cluster_name=cluster_name, subscription_id=self._config.subscription_id, @@ -438,39 +427,34 @@ def _create_or_update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._create_or_update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 201, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("Cluster", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("Cluster", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - _create_or_update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}" - } + return deserialized # type: ignore @overload def begin_create_or_update( @@ -495,14 +479,6 @@ def begin_create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.loganalytics.models.Cluster] :raises ~azure.core.exceptions.HttpResponseError: @@ -513,7 +489,7 @@ def begin_create_or_update( self, resource_group_name: str, cluster_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -527,18 +503,10 @@ def begin_create_or_update( :type cluster_name: str :param parameters: The parameters required to create or update a Log Analytics cluster. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.loganalytics.models.Cluster] :raises ~azure.core.exceptions.HttpResponseError: @@ -546,7 +514,7 @@ def begin_create_or_update( @distributed_trace def begin_create_or_update( - self, resource_group_name: str, cluster_name: str, parameters: Union[_models.Cluster, IO], **kwargs: Any + self, resource_group_name: str, cluster_name: str, parameters: Union[_models.Cluster, IO[bytes]], **kwargs: Any ) -> LROPoller[_models.Cluster]: """Create or update a Log Analytics cluster. @@ -556,19 +524,8 @@ def begin_create_or_update( :param cluster_name: The name of the Log Analytics cluster. Required. :type cluster_name: str :param parameters: The parameters required to create or update a Log Analytics cluster. Is - either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.loganalytics.models.Cluster or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + either a Cluster type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.loganalytics.models.Cluster or IO[bytes] :return: An instance of LROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.loganalytics.models.Cluster] :raises ~azure.core.exceptions.HttpResponseError: @@ -576,7 +533,7 @@ def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -594,12 +551,13 @@ def begin_create_or_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("Cluster", pipeline_response) + deserialized = self._deserialize("Cluster", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -609,22 +567,18 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[_models.Cluster].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}" - } + return LROPoller[_models.Cluster]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) - def _delete_initial( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, cluster_name: str, **kwargs: Any - ) -> None: - error_map = { + def _delete_initial(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -635,38 +589,42 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) - cls: ClsType[None] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, cluster_name=cluster_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = 
kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _delete_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}" - } + return deserialized # type: ignore @distributed_trace def begin_delete(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> LROPoller[None]: @@ -677,14 +635,6 @@ def begin_delete(self, resource_group_name: str, cluster_name: str, **kwargs: An :type resource_group_name: str :param cluster_name: Name of the Log Analytics Cluster. Required. :type cluster_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -692,13 +642,13 @@ def begin_delete(self, resource_group_name: str, cluster_name: str, **kwargs: An _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( # type: ignore + raw_result = self._delete_initial( resource_group_name=resource_group_name, cluster_name=cluster_name, api_version=api_version, @@ -707,11 +657,12 @@ def begin_delete(self, resource_group_name: str, cluster_name: str, **kwargs: An params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) @@ -720,17 +671,13 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}" - } + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore @distributed_trace def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> _models.Cluster: @@ -741,12 +688,11 @@ def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> _mo :type resource_group_name: str :param cluster_name: Name of the Log Analytics Cluster. Required. 
:type cluster_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Cluster or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.Cluster :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -757,23 +703,22 @@ def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> _mo _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, cluster_name=cluster_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -783,21 +728,21 @@ def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> _mo error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("Cluster", pipeline_response) + deserialized = self._deserialize("Cluster", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}" - } + return deserialized # type: ignore def _update_initial( - self, resource_group_name: str, cluster_name: str, parameters: Union[_models.ClusterPatch, IO], **kwargs: Any - ) -> _models.Cluster: - error_map = { + self, + resource_group_name: str, + cluster_name: str, + parameters: Union[_models.ClusterPatch, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -808,19 +753,19 @@ def _update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = 
parameters else: _json = self._serialize.body(parameters, "ClusterPatch") - request = build_update_request( + _request = build_update_request( resource_group_name=resource_group_name, cluster_name=cluster_name, subscription_id=self._config.subscription_id, @@ -828,34 +773,34 @@ def _update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("Cluster", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - _update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}" - } + return deserialized # type: ignore @overload def begin_update( @@ -879,14 +824,6 @@ def begin_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.loganalytics.models.Cluster] :raises ~azure.core.exceptions.HttpResponseError: @@ -897,7 +834,7 @@ def begin_update( self, resource_group_name: str, cluster_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -910,18 +847,10 @@ def begin_update( :param cluster_name: Name of the Log Analytics Cluster. Required. :type cluster_name: str :param parameters: The parameters required to patch a Log Analytics cluster. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.loganalytics.models.Cluster] :raises ~azure.core.exceptions.HttpResponseError: @@ -929,7 +858,11 @@ def begin_update( @distributed_trace def begin_update( - self, resource_group_name: str, cluster_name: str, parameters: Union[_models.ClusterPatch, IO], **kwargs: Any + self, + resource_group_name: str, + cluster_name: str, + parameters: Union[_models.ClusterPatch, IO[bytes]], + **kwargs: Any ) -> LROPoller[_models.Cluster]: """Updates a Log Analytics cluster. @@ -938,20 +871,9 @@ def begin_update( :type resource_group_name: str :param cluster_name: Name of the Log Analytics Cluster. Required. :type cluster_name: str - :param parameters: The parameters required to patch a Log Analytics cluster. Is either a model - type or a IO type. Required. - :type parameters: ~azure.mgmt.loganalytics.models.ClusterPatch or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: The parameters required to patch a Log Analytics cluster. Is either a + ClusterPatch type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.loganalytics.models.ClusterPatch or IO[bytes] :return: An instance of LROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.loganalytics.models.Cluster] :raises ~azure.core.exceptions.HttpResponseError: @@ -959,7 +881,7 @@ def begin_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2021-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -977,12 +899,13 @@ def begin_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("Cluster", pipeline_response) + deserialized = self._deserialize("Cluster", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -992,14 +915,12 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[_models.Cluster].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}" - } + return LROPoller[_models.Cluster]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_data_exports_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_data_exports_operations.py index 66e173ad795a..f7cb66c14785 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_data_exports_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_data_exports_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -19,20 +20,18 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -46,7 +45,7 @@ def build_list_by_workspace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -69,7 +68,7 @@ def build_list_by_workspace_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -86,7 +85,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -118,7 +117,7 @@ def build_create_or_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -137,7 +136,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -161,7 +160,7 @@ def build_get_request( "dataExportName": _SERIALIZER.url("data_export_name", 
data_export_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -178,7 +177,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -202,7 +201,7 @@ def build_delete_request( "dataExportName": _SERIALIZER.url("data_export_name", data_export_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -243,7 +242,6 @@ def list_by_workspace( :type resource_group_name: str :param workspace_name: The name of the workspace. Required. :type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either DataExport or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.loganalytics.models.DataExport] :raises ~azure.core.exceptions.HttpResponseError: @@ -251,10 +249,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.DataExportListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -265,24 +263,21 @@ def list_by_workspace( def prepare_request(next_link=None): if not next_link: - request = build_list_by_workspace_request( + _request = build_list_by_workspace_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_workspace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("DataExportListResult", pipeline_response) @@ -292,10 +287,11 @@ def extract_data(pipeline_response): return None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = 
pipeline_response.http_response @@ -308,10 +304,6 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list_by_workspace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataExports" - } - @overload def create_or_update( self, @@ -337,7 +329,6 @@ def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DataExport or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.DataExport :raises ~azure.core.exceptions.HttpResponseError: @@ -349,7 +340,7 @@ def create_or_update( resource_group_name: str, workspace_name: str, data_export_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -364,11 +355,10 @@ def create_or_update( :param data_export_name: The data export rule name. Required. :type data_export_name: str :param parameters: The parameters required to create or update a data export. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DataExport or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.DataExport :raises ~azure.core.exceptions.HttpResponseError: @@ -380,7 +370,7 @@ def create_or_update( resource_group_name: str, workspace_name: str, data_export_name: str, - parameters: Union[_models.DataExport, IO], + parameters: Union[_models.DataExport, IO[bytes]], **kwargs: Any ) -> _models.DataExport: """Create or update a data export. @@ -392,18 +382,14 @@ def create_or_update( :type workspace_name: str :param data_export_name: The data export rule name. Required. :type data_export_name: str - :param parameters: The parameters required to create or update a data export. Is either a model - type or a IO type. Required. - :type parameters: ~azure.mgmt.loganalytics.models.DataExport or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: The parameters required to create or update a data export. Is either a + DataExport type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.loganalytics.models.DataExport or IO[bytes] :return: DataExport or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.DataExport :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -414,19 +400,19 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DataExport] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "DataExport") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, data_export_name=data_export_name, @@ -435,15 +421,14 @@ def create_or_update( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -453,21 +438,13 @@ def create_or_update( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("DataExport", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("DataExport", pipeline_response) + deserialized = self._deserialize("DataExport", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataExports/{dataExportName}" - } - @distributed_trace def get( self, resource_group_name: str, workspace_name: str, data_export_name: str, **kwargs: Any @@ -481,12 +458,11 @@ def get( :type workspace_name: str :param data_export_name: The data export rule name. Required. 
:type data_export_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DataExport or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.DataExport :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -497,24 +473,23 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.DataExport] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, workspace_name=workspace_name, data_export_name=data_export_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -524,16 +499,12 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("DataExport", pipeline_response) + deserialized = self._deserialize("DataExport", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataExports/{dataExportName}" - } + return deserialized # type: ignore @distributed_trace def delete( # pylint: disable=inconsistent-return-statements @@ -548,12 +519,11 @@ def delete( # pylint: disable=inconsistent-return-statements :type workspace_name: str :param data_export_name: The data export rule name. Required. 
:type data_export_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -564,24 +534,23 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, data_export_name=data_export_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -592,8 +561,4 @@ def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataExports/{dataExportName}" - } + return cls(pipeline_response, None, {}) # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_data_sources_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_data_sources_operations.py index fd431b93afe8..f022fbf95bf0 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_data_sources_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_data_sources_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
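Across these regenerated request builders the vendored _format_url_section helper is dropped in favour of calling str.format directly on the URL template, after each path argument has been run through the serializer. A minimal stdlib-only sketch of that pattern, with urllib.parse.quote standing in for _SERIALIZER.url (the real serializer also enforces constraints such as min_length on the subscription id); all values below are placeholders:

from urllib.parse import quote

def format_url(template: str, **path_args: object) -> str:
    # quote() stands in for _SERIALIZER.url(); each path argument is encoded
    # individually and then substituted into the template with plain str.format.
    formatted = {name: quote(str(value), safe="") for name, value in path_args.items()}
    return template.format(**formatted)

print(
    format_url(
        "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}"
        "/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataExports",
        subscriptionId="00000000-0000-0000-0000-000000000000",
        resourceGroupName="example-rg",
        workspaceName="example-workspace",
    )
)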
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -19,20 +20,18 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -46,7 +45,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -71,7 +70,7 @@ def build_create_or_update_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -89,7 +88,7 @@ def build_delete_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) # Construct URL _url = kwargs.pop( "template_url", @@ -111,7 +110,7 @@ def build_delete_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -125,7 +124,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -149,7 +148,7 @@ 
def build_get_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -172,7 +171,7 @@ def build_list_by_workspace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -195,7 +194,7 @@ def build_list_by_workspace_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["$filter"] = _SERIALIZER.query("filter", filter, "str") @@ -253,7 +252,6 @@ def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DataSource or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.DataSource :raises ~azure.core.exceptions.HttpResponseError: @@ -265,7 +263,7 @@ def create_or_update( resource_group_name: str, workspace_name: str, data_source_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -280,11 +278,10 @@ def create_or_update( :param data_source_name: The name of the datasource resource. Required. :type data_source_name: str :param parameters: The parameters required to create or update a datasource. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DataSource or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.DataSource :raises ~azure.core.exceptions.HttpResponseError: @@ -296,7 +293,7 @@ def create_or_update( resource_group_name: str, workspace_name: str, data_source_name: str, - parameters: Union[_models.DataSource, IO], + parameters: Union[_models.DataSource, IO[bytes]], **kwargs: Any ) -> _models.DataSource: """Create or update a data source. @@ -308,18 +305,14 @@ def create_or_update( :type workspace_name: str :param data_source_name: The name of the datasource resource. Required. :type data_source_name: str - :param parameters: The parameters required to create or update a datasource. Is either a model - type or a IO type. Required. - :type parameters: ~azure.mgmt.loganalytics.models.DataSource or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: The parameters required to create or update a datasource. 
Is either a + DataSource type or a IO[bytes] type. Required. + :type parameters: ~azure.mgmt.loganalytics.models.DataSource or IO[bytes] :return: DataSource or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.DataSource :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -330,19 +323,19 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DataSource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "DataSource") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, data_source_name=data_source_name, @@ -351,15 +344,14 @@ def create_or_update( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -368,21 +360,13 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("DataSource", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("DataSource", pipeline_response) + deserialized = self._deserialize("DataSource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataSources/{dataSourceName}" - } - @distributed_trace def delete( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, workspace_name: str, data_source_name: str, **kwargs: Any @@ -396,12 +380,11 @@ def delete( # pylint: disable=inconsistent-return-statements :type workspace_name: str :param data_source_name: Name of the datasource. Required. 
:type data_source_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -412,24 +395,23 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, data_source_name=data_source_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -439,11 +421,7 @@ def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataSources/{dataSourceName}" - } + return cls(pipeline_response, None, {}) # type: ignore @distributed_trace def get( @@ -458,12 +436,11 @@ def get( :type workspace_name: str :param data_source_name: Name of the datasource. Required. 
:type data_source_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DataSource or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.DataSource :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -474,24 +451,23 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.DataSource] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, workspace_name=workspace_name, data_source_name=data_source_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -500,16 +476,12 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DataSource", pipeline_response) + deserialized = self._deserialize("DataSource", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataSources/{dataSourceName}" - } + return deserialized # type: ignore @distributed_trace def list_by_workspace( @@ -527,7 +499,6 @@ def list_by_workspace( :param skiptoken: Starting point of the collection of data source instances. Default value is None. 
:type skiptoken: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either DataSource or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.loganalytics.models.DataSource] :raises ~azure.core.exceptions.HttpResponseError: @@ -535,10 +506,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.DataSourceListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -549,26 +520,23 @@ def list_by_workspace( def prepare_request(next_link=None): if not next_link: - request = build_list_by_workspace_request( + _request = build_list_by_workspace_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, filter=filter, skiptoken=skiptoken, api_version=api_version, - template_url=self.list_by_workspace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("DataSourceListResult", pipeline_response) @@ -578,10 +546,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -592,7 +561,3 @@ def get_next(next_link=None): return pipeline_response return ItemPaged(get_next, extract_data) - - list_by_workspace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataSources" - } diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_deleted_workspaces_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_deleted_workspaces_operations.py index a85453bedef2..edeb17269ab1 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_deleted_workspaces_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_deleted_workspaces_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
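From the caller's side the paging behaviour is unchanged by the _request renaming and the removal of the metadata URL templates: list_by_workspace still returns a lazy ItemPaged that fetches pages on demand through prepare_request/get_next/extract_data. A hedged usage sketch, assuming the management client is constructed as in this package's README, that data_sources is the client attribute backed by DataSourcesOperations, and using an illustrative $filter value:

from azure.identity import DefaultAzureCredential
from azure.mgmt.loganalytics import LogAnalyticsManagementClient

# Subscription id, resource group, workspace and filter are placeholders.
client = LogAnalyticsManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",
)

pager = client.data_sources.list_by_workspace(
    resource_group_name="example-rg",
    workspace_name="example-workspace",
    filter="kind eq 'WindowsEvent'",
)
for data_source in pager:  # pages are fetched lazily as the iterator advances
    print(data_source.name, data_source.kind)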
@@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar +from typing import Any, Callable, Dict, Iterable, Optional, Type, TypeVar from azure.core.exceptions import ( ClientAuthenticationError, @@ -19,20 +19,18 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -44,7 +42,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -55,7 +53,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -70,7 +68,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -85,7 +83,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -119,7 +117,6 @@ def __init__(self, *args, **kwargs): def list(self, **kwargs: Any) -> Iterable["_models.Workspace"]: """Gets recently deleted workspaces in a subscription, available for recovery. 
- :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Workspace or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.loganalytics.models.Workspace] :raises ~azure.core.exceptions.HttpResponseError: @@ -127,10 +124,10 @@ def list(self, **kwargs: Any) -> Iterable["_models.Workspace"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -141,22 +138,19 @@ def list(self, **kwargs: Any) -> Iterable["_models.Workspace"]: def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("WorkspaceListResult", pipeline_response) @@ -166,10 +160,11 @@ def extract_data(pipeline_response): return None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -182,8 +177,6 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.OperationalInsights/deletedWorkspaces"} - @distributed_trace def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.Workspace"]: """Gets recently deleted workspaces in a resource group, available for recovery. @@ -191,7 +184,6 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite :param resource_group_name: The name of the resource group. The name is case insensitive. Required. 
:type resource_group_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Workspace or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.loganalytics.models.Workspace] :raises ~azure.core.exceptions.HttpResponseError: @@ -199,10 +191,10 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -213,23 +205,20 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite def prepare_request(next_link=None): if not next_link: - request = build_list_by_resource_group_request( + _request = build_list_by_resource_group_request( resource_group_name=resource_group_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_resource_group.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("WorkspaceListResult", pipeline_response) @@ -239,10 +228,11 @@ def extract_data(pipeline_response): return None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -254,7 +244,3 @@ def get_next(next_link=None): return pipeline_response return ItemPaged(get_next, extract_data) - - list_by_resource_group.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/deletedWorkspaces" - } diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_gateways_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_gateways_operations.py index 253075fcbc5e..1d5a4d78fdd9 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_gateways_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_gateways_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
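Two typing changes recur throughout the regenerated modules: the Literal["..."] annotations on api_version become plain str, and every error_map gains an explicit MutableMapping[int, Type[HttpResponseError]] annotation, with the import gated on Python 3.9, where collections.abc.MutableMapping became subscriptable. A small self-contained sketch of that pattern, runnable wherever azure-core is installed:

import sys
from typing import Type

from azure.core.exceptions import (
    ClientAuthenticationError,
    HttpResponseError,
    ResourceExistsError,
    ResourceNotFoundError,
    ResourceNotModifiedError,
)

if sys.version_info >= (3, 9):
    from collections.abc import MutableMapping
else:
    from typing import MutableMapping  # type: ignore

# Maps HTTP status codes to the azure-core exception raised by map_error().
error_map: MutableMapping[int, Type[HttpResponseError]] = {
    401: ClientAuthenticationError,
    404: ResourceNotFoundError,
    409: ResourceExistsError,
    304: ResourceNotModifiedError,
}
print(error_map[404].__name__)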
@@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, Optional, TypeVar +from typing import Any, Callable, Dict, Optional, Type, TypeVar from azure.core.exceptions import ( ClientAuthenticationError, @@ -18,20 +18,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -44,7 +42,7 @@ def build_delete_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) # Construct URL _url = kwargs.pop( "template_url", @@ -66,7 +64,7 @@ def build_delete_request( "gatewayId": _SERIALIZER.url("gateway_id", gateway_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -106,12 +104,11 @@ def delete( # pylint: disable=inconsistent-return-statements :type workspace_name: str :param gateway_id: The Log Analytics gateway Id. Required. 
:type gateway_id: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -122,24 +119,23 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, gateway_id=gateway_id, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -149,8 +145,4 @@ def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/gateways/{gatewayId}" - } + return cls(pipeline_response, None, {}) # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_intelligence_packs_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_intelligence_packs_operations.py index 2c411531d09f..0bfcf5191431 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_intelligence_packs_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_intelligence_packs_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, List, Optional, TypeVar +from typing import Any, Callable, Dict, List, Optional, Type, TypeVar from azure.core.exceptions import ( ClientAuthenticationError, @@ -18,20 +18,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -44,7 +42,7 @@ def build_disable_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) # Construct URL _url = kwargs.pop( "template_url", @@ -66,7 +64,7 @@ def build_disable_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -79,7 +77,7 @@ def build_enable_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) # Construct URL _url = kwargs.pop( "template_url", @@ -101,7 +99,7 @@ def build_enable_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -115,7 +113,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -138,7 +136,7 @@ def build_list_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -181,12 +179,11 @@ def disable( # pylint: disable=inconsistent-return-statements :type workspace_name: str :param intelligence_pack_name: The name of the intelligence pack to be disabled. Required. 
:type intelligence_pack_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -197,24 +194,23 @@ def disable( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_disable_request( + _request = build_disable_request( resource_group_name=resource_group_name, workspace_name=workspace_name, intelligence_pack_name=intelligence_pack_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.disable.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -224,11 +220,7 @@ def disable( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - disable.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/intelligencePacks/{intelligencePackName}/Disable" - } + return cls(pipeline_response, None, {}) # type: ignore @distributed_trace def enable( # pylint: disable=inconsistent-return-statements @@ -243,12 +235,11 @@ def enable( # pylint: disable=inconsistent-return-statements :type workspace_name: str :param intelligence_pack_name: The name of the intelligence pack to be enabled. Required. 
:type intelligence_pack_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -259,24 +250,23 @@ def enable( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_enable_request( + _request = build_enable_request( resource_group_name=resource_group_name, workspace_name=workspace_name, intelligence_pack_name=intelligence_pack_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.enable.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -286,11 +276,7 @@ def enable( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - enable.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/intelligencePacks/{intelligencePackName}/Enable" - } + return cls(pipeline_response, None, {}) # type: ignore @distributed_trace def list(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> List[_models.IntelligencePack]: @@ -302,12 +288,11 @@ def list(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> :type resource_group_name: str :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: list of IntelligencePack or the result of cls(response) :rtype: list[~azure.mgmt.loganalytics.models.IntelligencePack] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -318,23 +303,22 @@ def list(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[List[_models.IntelligencePack]] = kwargs.pop("cls", None) - request = build_list_request( + _request = build_list_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -343,13 +327,9 @@ def list(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("[IntelligencePack]", pipeline_response) + deserialized = self._deserialize("[IntelligencePack]", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - list.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/intelligencePacks" - } + return deserialized # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_linked_services_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_linked_services_operations.py index 058c03279fa5..2997e59abacf 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_linked_services_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_linked_services_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
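Unlike the paged workspace listings above, IntelligencePacksOperations.list returns a plain list of IntelligencePack, and enable/disable return None on success. A hedged caller-side sketch; the intelligence_packs attribute name and the pack name used here are assumed for illustration only:

from azure.identity import DefaultAzureCredential
from azure.mgmt.loganalytics import LogAnalyticsManagementClient

# All identifiers below are placeholders.
client = LogAnalyticsManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",
)

client.intelligence_packs.enable("example-rg", "example-workspace", "SecurityInsights")
for pack in client.intelligence_packs.list("example-rg", "example-workspace"):
    print(pack.name, pack.enabled)
client.intelligence_packs.disable("example-rg", "example-workspace", "SecurityInsights")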
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -15,13 +16,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -29,12 +31,11 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -48,7 +49,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -73,7 +74,7 @@ def build_create_or_update_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -92,7 +93,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -116,7 +117,7 @@ def build_delete_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -133,7 +134,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -157,7 +158,7 @@ def build_get_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -174,7 +175,7 @@ def build_list_by_workspace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -197,7 +198,7 @@ def build_list_by_workspace_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -232,10 +233,10 @@ def _create_or_update_initial( resource_group_name: str, workspace_name: str, linked_service_name: str, - parameters: Union[_models.LinkedService, IO], + parameters: Union[_models.LinkedService, IO[bytes]], **kwargs: Any - ) -> Optional[_models.LinkedService]: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -246,19 +247,19 @@ def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.LinkedService]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "LinkedService") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, linked_service_name=linked_service_name, @@ -267,38 +268,33 @@ def _create_or_update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._create_or_update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = 
self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 201, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("LinkedService", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("LinkedService", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - _create_or_update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}" - } + return deserialized # type: ignore @overload def begin_create_or_update( @@ -325,14 +321,6 @@ def begin_create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either LinkedService or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.loganalytics.models.LinkedService] @@ -345,7 +333,7 @@ def begin_create_or_update( resource_group_name: str, workspace_name: str, linked_service_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -360,18 +348,10 @@ def begin_create_or_update( :param linked_service_name: Name of the linkedServices resource. Required. :type linked_service_name: str :param parameters: The parameters required to create or update a linked service. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either LinkedService or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.loganalytics.models.LinkedService] @@ -384,7 +364,7 @@ def begin_create_or_update( resource_group_name: str, workspace_name: str, linked_service_name: str, - parameters: Union[_models.LinkedService, IO], + parameters: Union[_models.LinkedService, IO[bytes]], **kwargs: Any ) -> LROPoller[_models.LinkedService]: """Create or update a linked service. @@ -397,19 +377,8 @@ def begin_create_or_update( :param linked_service_name: Name of the linkedServices resource. Required. :type linked_service_name: str :param parameters: The parameters required to create or update a linked service. Is either a - model type or a IO type. Required. - :type parameters: ~azure.mgmt.loganalytics.models.LinkedService or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + LinkedService type or a IO[bytes] type. Required. + :type parameters: ~azure.mgmt.loganalytics.models.LinkedService or IO[bytes] :return: An instance of LROPoller that returns either LinkedService or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.loganalytics.models.LinkedService] @@ -418,7 +387,7 @@ def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LinkedService] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -437,12 +406,13 @@ def begin_create_or_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("LinkedService", pipeline_response) + deserialized = self._deserialize("LinkedService", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -452,22 +422,20 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[_models.LinkedService].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_create_or_update.metadata = { - "url": 
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}" - } + return LROPoller[_models.LinkedService]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) def _delete_initial( self, resource_group_name: str, workspace_name: str, linked_service_name: str, **kwargs: Any - ) -> Optional[_models.LinkedService]: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -478,44 +446,42 @@ def _delete_initial( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) - cls: ClsType[Optional[_models.LinkedService]] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, linked_service_name=linked_service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("LinkedService", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - _delete_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}" - } + return deserialized # type: ignore @distributed_trace def begin_delete( @@ -530,14 +496,6 @@ def begin_delete( :type workspace_name: str :param linked_service_name: Name of the linked service. Required. :type linked_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. 
- :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either LinkedService or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.loganalytics.models.LinkedService] @@ -546,7 +504,7 @@ def begin_delete( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.LinkedService] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) @@ -562,12 +520,13 @@ def begin_delete( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("LinkedService", pipeline_response) + deserialized = self._deserialize("LinkedService", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -577,17 +536,15 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[_models.LinkedService].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}" - } + return LROPoller[_models.LinkedService]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) @distributed_trace def get( @@ -602,12 +559,11 @@ def get( :type workspace_name: str :param linked_service_name: Name of the linked service. Required. 
:type linked_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LinkedService or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LinkedService :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -618,24 +574,23 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.LinkedService] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, workspace_name=workspace_name, linked_service_name=linked_service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -644,16 +599,12 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("LinkedService", pipeline_response) + deserialized = self._deserialize("LinkedService", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}" - } + return deserialized # type: ignore @distributed_trace def list_by_workspace( @@ -666,7 +617,6 @@ def list_by_workspace( :type resource_group_name: str :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either LinkedService or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.loganalytics.models.LinkedService] :raises ~azure.core.exceptions.HttpResponseError: @@ -674,10 +624,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.LinkedServiceListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -688,24 +638,21 @@ def list_by_workspace( def prepare_request(next_link=None): if not next_link: - request = build_list_by_workspace_request( + _request = build_list_by_workspace_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_workspace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("LinkedServiceListResult", pipeline_response) @@ -715,10 +662,11 @@ def extract_data(pipeline_response): return None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -729,7 +677,3 @@ def get_next(next_link=None): return pipeline_response return ItemPaged(get_next, extract_data) - - list_by_workspace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices" - } diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_linked_storage_accounts_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_linked_storage_accounts_operations.py index 94116a32261d..c98231c49c0b 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_linked_storage_accounts_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_linked_storage_accounts_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -19,20 +20,18 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -50,7 +49,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -75,7 +74,7 @@ def build_create_or_update_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -97,7 +96,7 @@ def build_delete_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) # Construct URL _url = kwargs.pop( "template_url", @@ -119,7 +118,7 @@ def build_delete_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -137,7 +136,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) accept 
= _headers.pop("Accept", "application/json") # Construct URL @@ -161,7 +160,7 @@ def build_get_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -178,7 +177,7 @@ def build_list_by_workspace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -201,7 +200,7 @@ def build_list_by_workspace_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -259,7 +258,6 @@ def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LinkedStorageAccountsResource or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LinkedStorageAccountsResource :raises ~azure.core.exceptions.HttpResponseError: @@ -271,7 +269,7 @@ def create_or_update( resource_group_name: str, workspace_name: str, data_source_type: Union[str, _models.DataSourceType], - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -289,11 +287,10 @@ def create_or_update( :type data_source_type: str or ~azure.mgmt.loganalytics.models.DataSourceType :param parameters: The parameters required to create or update linked storage accounts. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LinkedStorageAccountsResource or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LinkedStorageAccountsResource :raises ~azure.core.exceptions.HttpResponseError: @@ -305,7 +302,7 @@ def create_or_update( resource_group_name: str, workspace_name: str, data_source_type: Union[str, _models.DataSourceType], - parameters: Union[_models.LinkedStorageAccountsResource, IO], + parameters: Union[_models.LinkedStorageAccountsResource, IO[bytes]], **kwargs: Any ) -> _models.LinkedStorageAccountsResource: """Create or Update a link relation between current workspace and a group of storage accounts of a @@ -320,17 +317,13 @@ def create_or_update( "AzureWatson", "Query", "Ingestion", and "Alerts". Required. :type data_source_type: str or ~azure.mgmt.loganalytics.models.DataSourceType :param parameters: The parameters required to create or update linked storage accounts. Is - either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.loganalytics.models.LinkedStorageAccountsResource or IO - :keyword content_type: Body Parameter content-type. 
Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + either a LinkedStorageAccountsResource type or a IO[bytes] type. Required. + :type parameters: ~azure.mgmt.loganalytics.models.LinkedStorageAccountsResource or IO[bytes] :return: LinkedStorageAccountsResource or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LinkedStorageAccountsResource :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -341,19 +334,19 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LinkedStorageAccountsResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "LinkedStorageAccountsResource") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, data_source_type=data_source_type, @@ -362,15 +355,14 @@ def create_or_update( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -379,16 +371,12 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("LinkedStorageAccountsResource", pipeline_response) + deserialized = self._deserialize("LinkedStorageAccountsResource", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedStorageAccounts/{dataSourceType}" - } + return deserialized # type: ignore @distributed_trace def delete( # pylint: disable=inconsistent-return-statements @@ -409,12 +397,11 @@ def delete( # pylint: disable=inconsistent-return-statements :param data_source_type: Linked storage accounts type. Known values are: "CustomLogs", "AzureWatson", "Query", "Ingestion", and "Alerts". Required. 
:type data_source_type: str or ~azure.mgmt.loganalytics.models.DataSourceType - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -425,24 +412,23 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, data_source_type=data_source_type, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -452,11 +438,7 @@ def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedStorageAccounts/{dataSourceType}" - } + return cls(pipeline_response, None, {}) # type: ignore @distributed_trace def get( @@ -477,12 +459,11 @@ def get( :param data_source_type: Linked storage accounts type. Known values are: "CustomLogs", "AzureWatson", "Query", "Ingestion", and "Alerts". Required. 
:type data_source_type: str or ~azure.mgmt.loganalytics.models.DataSourceType - :keyword callable cls: A custom type or function that will be passed the direct response :return: LinkedStorageAccountsResource or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LinkedStorageAccountsResource :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -493,24 +474,23 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.LinkedStorageAccountsResource] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, workspace_name=workspace_name, data_source_type=data_source_type, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -519,16 +499,12 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("LinkedStorageAccountsResource", pipeline_response) + deserialized = self._deserialize("LinkedStorageAccountsResource", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedStorageAccounts/{dataSourceType}" - } + return deserialized # type: ignore @distributed_trace def list_by_workspace( @@ -542,7 +518,6 @@ def list_by_workspace( :type resource_group_name: str :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either LinkedStorageAccountsResource or the result of cls(response) :rtype: @@ -552,10 +527,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.LinkedStorageAccountsListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -566,24 +541,21 @@ def list_by_workspace( def prepare_request(next_link=None): if not next_link: - request = build_list_by_workspace_request( + _request = build_list_by_workspace_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_workspace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("LinkedStorageAccountsListResult", pipeline_response) @@ -593,10 +565,11 @@ def extract_data(pipeline_response): return None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -607,7 +580,3 @@ def get_next(next_link=None): return pipeline_response return ItemPaged(get_next, extract_data) - - list_by_workspace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedStorageAccounts" - } diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_management_groups_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_management_groups_operations.py index 0f23e39fb674..59f63d691f69 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_management_groups_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_management_groups_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar +from typing import Any, Callable, Dict, Iterable, Optional, Type, TypeVar from azure.core.exceptions import ( ClientAuthenticationError, @@ -19,20 +19,18 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -46,7 +44,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -69,7 +67,7 @@ def build_list_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -108,7 +106,6 @@ def list(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> :type resource_group_name: str :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ManagementGroup or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.loganalytics.models.ManagementGroup] :raises ~azure.core.exceptions.HttpResponseError: @@ -116,10 +113,10 @@ def list(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.WorkspaceListManagementGroupsResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -130,24 +127,21 @@ def list(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("WorkspaceListManagementGroupsResult", pipeline_response) @@ -157,10 +151,11 @@ def extract_data(pipeline_response): return None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -171,7 +166,3 @@ def get_next(next_link=None): return pipeline_response return ItemPaged(get_next, extract_data) - - list.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/managementGroups" - } diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_operation_statuses_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_operation_statuses_operations.py index b5be8d50a46e..a8afe095866c 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_operation_statuses_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_operation_statuses_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, Optional, TypeVar +from typing import Any, Callable, Dict, Optional, Type, TypeVar from azure.core.exceptions import ( ClientAuthenticationError, @@ -18,20 +18,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -43,7 +41,7 @@ def build_get_request(location: str, async_operation_id: str, subscription_id: s _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -57,7 +55,7 @@ def build_get_request(location: str, async_operation_id: str, subscription_id: s "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -95,12 +93,11 @@ def get(self, location: str, async_operation_id: str, **kwargs: Any) -> _models. :type location: str :param async_operation_id: The operation Id. Required. :type async_operation_id: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: OperationStatus or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.OperationStatus :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -111,23 +108,22 @@ def get(self, location: str, async_operation_id: str, **kwargs: Any) -> _models. 
_headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.OperationStatus] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( location=location, async_operation_id=async_operation_id, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -136,13 +132,9 @@ def get(self, location: str, async_operation_id: str, **kwargs: Any) -> _models. map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("OperationStatus", pipeline_response) + deserialized = self._deserialize("OperationStatus", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - get.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.OperationalInsights/locations/{location}/operationStatuses/{asyncOperationId}" - } + return deserialized # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_operations.py index d2fa411d4679..3e71b8ff6954 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar +from typing import Any, Callable, Dict, Iterable, Optional, Type, TypeVar from azure.core.exceptions import ( ClientAuthenticationError, @@ -19,20 +19,18 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -44,7 +42,7 @@ def build_list_request(**kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -82,7 +80,6 @@ def __init__(self, *args, **kwargs): def list(self, **kwargs: Any) -> Iterable["_models.Operation"]: """Lists all of the available OperationalInsights Rest API operations. - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Operation or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.loganalytics.models.Operation] :raises ~azure.core.exceptions.HttpResponseError: @@ -90,10 +87,10 @@ def list(self, **kwargs: Any) -> Iterable["_models.Operation"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -104,21 +101,18 @@ def list(self, **kwargs: Any) -> Iterable["_models.Operation"]: def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("OperationListResult", pipeline_response) @@ -128,10 +122,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -143,5 +138,3 @@ 
def get_next(next_link=None): return pipeline_response return ItemPaged(get_next, extract_data) - - list.metadata = {"url": "/providers/Microsoft.OperationalInsights/operations"} diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_queries_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_queries_operations.py index c385dea26e3b..03499676d0f3 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_queries_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_queries_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -19,20 +20,18 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -53,7 +52,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -69,7 +68,7 @@ def build_list_request( "queryPackName": _SERIALIZER.url("query_pack_name", query_pack_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -99,7 +98,7 @@ def build_search_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -116,7 +115,7 @@ def build_search_request( "queryPackName": _SERIALIZER.url("query_pack_name", query_pack_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -141,7 +140,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -158,7 +157,7 @@ def build_get_request( "id": _SERIALIZER.url("id", id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -175,7 +174,7 @@ def build_put_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = 
_headers.pop("Accept", "application/json") @@ -193,7 +192,7 @@ def build_put_request( "id": _SERIALIZER.url("id", id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -212,7 +211,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -230,7 +229,7 @@ def build_update_request( "id": _SERIALIZER.url("id", id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -249,7 +248,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -266,7 +265,7 @@ def build_delete_request( "id": _SERIALIZER.url("id", id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -321,7 +320,6 @@ def list( :param skip_token: Base64 encoded token used to fetch the next page of items. Default is null. Default value is None. 
:type skip_token: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either LogAnalyticsQueryPackQuery or the result of cls(response) :rtype: @@ -331,10 +329,10 @@ def list( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) cls: ClsType[_models.LogAnalyticsQueryPackQueryListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -345,7 +343,7 @@ def list( def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( resource_group_name=resource_group_name, query_pack_name=query_pack_name, subscription_id=self._config.subscription_id, @@ -353,19 +351,16 @@ def prepare_request(next_link=None): include_body=include_body, skip_token=skip_token, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("LogAnalyticsQueryPackQueryListResult", pipeline_response) @@ -375,10 +370,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -391,10 +387,6 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}/queries" - } - @overload def search( self, @@ -431,7 +423,6 @@ def search( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either LogAnalyticsQueryPackQuery or the result of cls(response) :rtype: @@ -444,7 +435,7 @@ def search( self, resource_group_name: str, query_pack_name: str, - query_search_properties: IO, + query_search_properties: IO[bytes], top: Optional[int] = None, include_body: Optional[bool] = None, skip_token: Optional[str] = None, @@ -462,7 +453,7 @@ def search( :type query_pack_name: str :param query_search_properties: Properties by which to search queries in the given Log Analytics QueryPack. Required. 
- :type query_search_properties: IO + :type query_search_properties: IO[bytes] :param top: Maximum items returned in page. Default value is None. :type top: int :param include_body: Flag indicating whether or not to return the body of each applicable @@ -474,7 +465,6 @@ def search( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either LogAnalyticsQueryPackQuery or the result of cls(response) :rtype: @@ -487,7 +477,7 @@ def search( self, resource_group_name: str, query_pack_name: str, - query_search_properties: Union[_models.LogAnalyticsQueryPackQuerySearchProperties, IO], + query_search_properties: Union[_models.LogAnalyticsQueryPackQuerySearchProperties, IO[bytes]], top: Optional[int] = None, include_body: Optional[bool] = None, skip_token: Optional[str] = None, @@ -502,9 +492,10 @@ def search( :param query_pack_name: The name of the Log Analytics QueryPack resource. Required. :type query_pack_name: str :param query_search_properties: Properties by which to search queries in the given Log - Analytics QueryPack. Is either a model type or a IO type. Required. + Analytics QueryPack. Is either a LogAnalyticsQueryPackQuerySearchProperties type or a IO[bytes] + type. Required. :type query_search_properties: - ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuerySearchProperties or IO + ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuerySearchProperties or IO[bytes] :param top: Maximum items returned in page. Default value is None. :type top: int :param include_body: Flag indicating whether or not to return the body of each applicable @@ -513,10 +504,6 @@ def search( :param skip_token: Base64 encoded token used to fetch the next page of items. Default is null. Default value is None. :type skip_token: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either LogAnalyticsQueryPackQuery or the result of cls(response) :rtype: @@ -526,11 +513,11 @@ def search( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPackQueryListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -540,7 +527,7 @@ def search( content_type = content_type or "application/json" _json = None _content = None - if isinstance(query_search_properties, (IO, bytes)): + if isinstance(query_search_properties, (IOBase, bytes)): _content = query_search_properties else: _json = self._serialize.body(query_search_properties, "LogAnalyticsQueryPackQuerySearchProperties") @@ -548,7 +535,7 @@ def search( def prepare_request(next_link=None): if not next_link: - request = build_search_request( + _request = build_search_request( resource_group_name=resource_group_name, query_pack_name=query_pack_name, subscription_id=self._config.subscription_id, @@ -559,19 +546,16 @@ def prepare_request(next_link=None): content_type=content_type, json=_json, content=_content, - template_url=self.search.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("LogAnalyticsQueryPackQueryListResult", pipeline_response) @@ -581,10 +565,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -597,10 +582,6 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - search.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}/queries/search" - } - @distributed_trace def get( self, resource_group_name: str, query_pack_name: str, id: str, **kwargs: Any @@ -614,12 +595,11 @@ def get( :type query_pack_name: str :param id: The id of a specific query defined in the Log Analytics QueryPack. Required. 
:type id: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPackQuery or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -630,24 +610,23 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) cls: ClsType[_models.LogAnalyticsQueryPackQuery] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, query_pack_name=query_pack_name, id=id, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -657,16 +636,12 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("LogAnalyticsQueryPackQuery", pipeline_response) + deserialized = self._deserialize("LogAnalyticsQueryPackQuery", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}/queries/{id}" - } + return deserialized # type: ignore @overload def put( @@ -694,7 +669,6 @@ def put( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPackQuery or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery :raises ~azure.core.exceptions.HttpResponseError: @@ -706,7 +680,7 @@ def put( resource_group_name: str, query_pack_name: str, id: str, - query_payload: IO, + query_payload: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -722,11 +696,10 @@ def put( :type id: str :param query_payload: Properties that need to be specified to create a new query and add it to a Log Analytics QueryPack. Required. - :type query_payload: IO + :type query_payload: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPackQuery or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery :raises ~azure.core.exceptions.HttpResponseError: @@ -738,7 +711,7 @@ def put( resource_group_name: str, query_pack_name: str, id: str, - query_payload: Union[_models.LogAnalyticsQueryPackQuery, IO], + query_payload: Union[_models.LogAnalyticsQueryPackQuery, IO[bytes]], **kwargs: Any ) -> _models.LogAnalyticsQueryPackQuery: """Adds or Updates a specific Query within a Log Analytics QueryPack. @@ -751,17 +724,14 @@ def put( :param id: The id of a specific query defined in the Log Analytics QueryPack. Required. :type id: str :param query_payload: Properties that need to be specified to create a new query and add it to - a Log Analytics QueryPack. Is either a model type or a IO type. Required. - :type query_payload: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + a Log Analytics QueryPack. Is either a LogAnalyticsQueryPackQuery type or a IO[bytes] type. + Required. + :type query_payload: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery or IO[bytes] :return: LogAnalyticsQueryPackQuery or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -772,19 +742,19 @@ def put( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPackQuery] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(query_payload, (IO, bytes)): + if isinstance(query_payload, (IOBase, bytes)): _content = query_payload else: _json = self._serialize.body(query_payload, "LogAnalyticsQueryPackQuery") - request = build_put_request( + _request = build_put_request( resource_group_name=resource_group_name, query_pack_name=query_pack_name, id=id, @@ -793,15 +763,14 @@ def put( content_type=content_type, json=_json, content=_content, - template_url=self.put.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -811,16 +780,12 @@ def put( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = 
self._deserialize("LogAnalyticsQueryPackQuery", pipeline_response) + deserialized = self._deserialize("LogAnalyticsQueryPackQuery", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - put.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}/queries/{id}" - } + return deserialized # type: ignore @overload def update( @@ -848,7 +813,6 @@ def update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPackQuery or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery :raises ~azure.core.exceptions.HttpResponseError: @@ -860,7 +824,7 @@ def update( resource_group_name: str, query_pack_name: str, id: str, - query_payload: IO, + query_payload: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -876,11 +840,10 @@ def update( :type id: str :param query_payload: Properties that need to be specified to create a new query and add it to a Log Analytics QueryPack. Required. - :type query_payload: IO + :type query_payload: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPackQuery or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery :raises ~azure.core.exceptions.HttpResponseError: @@ -892,7 +855,7 @@ def update( resource_group_name: str, query_pack_name: str, id: str, - query_payload: Union[_models.LogAnalyticsQueryPackQuery, IO], + query_payload: Union[_models.LogAnalyticsQueryPackQuery, IO[bytes]], **kwargs: Any ) -> _models.LogAnalyticsQueryPackQuery: """Adds or Updates a specific Query within a Log Analytics QueryPack. @@ -905,17 +868,14 @@ def update( :param id: The id of a specific query defined in the Log Analytics QueryPack. Required. :type id: str :param query_payload: Properties that need to be specified to create a new query and add it to - a Log Analytics QueryPack. Is either a model type or a IO type. Required. - :type query_payload: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + a Log Analytics QueryPack. Is either a LogAnalyticsQueryPackQuery type or a IO[bytes] type. + Required. 
+ :type query_payload: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery or IO[bytes] :return: LogAnalyticsQueryPackQuery or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -926,19 +886,19 @@ def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPackQuery] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(query_payload, (IO, bytes)): + if isinstance(query_payload, (IOBase, bytes)): _content = query_payload else: _json = self._serialize.body(query_payload, "LogAnalyticsQueryPackQuery") - request = build_update_request( + _request = build_update_request( resource_group_name=resource_group_name, query_pack_name=query_pack_name, id=id, @@ -947,15 +907,14 @@ def update( content_type=content_type, json=_json, content=_content, - template_url=self.update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -965,16 +924,12 @@ def update( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("LogAnalyticsQueryPackQuery", pipeline_response) + deserialized = self._deserialize("LogAnalyticsQueryPackQuery", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}/queries/{id}" - } + return deserialized # type: ignore @distributed_trace def delete( # pylint: disable=inconsistent-return-statements @@ -989,12 +944,11 @@ def delete( # pylint: disable=inconsistent-return-statements :type query_pack_name: str :param id: The id of a specific query defined in the Log Analytics QueryPack. Required. 
:type id: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1005,24 +959,23 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, query_pack_name=query_pack_name, id=id, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1033,8 +986,4 @@ def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}/queries/{id}" - } + return cls(pipeline_response, None, {}) # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_query_packs_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_query_packs_operations.py index a3acbdcfeb7d..5c8352b33be0 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_query_packs_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_query_packs_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -19,20 +20,18 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -44,7 +43,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -55,7 +54,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -70,7 +69,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -85,7 +84,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -96,13 +95,13 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_create_or_update_without_name_request( +def 
build_create_or_update_without_name_request( # pylint: disable=name-too-long resource_group_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -118,7 +117,7 @@ def build_create_or_update_without_name_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -137,7 +136,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -153,7 +152,7 @@ def build_delete_request( "queryPackName": _SERIALIZER.url("query_pack_name", query_pack_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -170,7 +169,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -186,7 +185,7 @@ def build_get_request( "queryPackName": _SERIALIZER.url("query_pack_name", query_pack_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -203,7 +202,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -220,7 +219,7 @@ def build_create_or_update_request( "queryPackName": _SERIALIZER.url("query_pack_name", query_pack_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", 
api_version, "str") @@ -239,7 +238,7 @@ def build_update_tags_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -256,7 +255,7 @@ def build_update_tags_request( "queryPackName": _SERIALIZER.url("query_pack_name", query_pack_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -292,7 +291,6 @@ def __init__(self, *args, **kwargs): def list(self, **kwargs: Any) -> Iterable["_models.LogAnalyticsQueryPack"]: """Gets a list of all Log Analytics QueryPacks within a subscription. - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either LogAnalyticsQueryPack or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack] @@ -301,10 +299,10 @@ def list(self, **kwargs: Any) -> Iterable["_models.LogAnalyticsQueryPack"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) cls: ClsType[_models.LogAnalyticsQueryPackListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -315,22 +313,19 @@ def list(self, **kwargs: Any) -> Iterable["_models.LogAnalyticsQueryPack"]: def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("LogAnalyticsQueryPackListResult", pipeline_response) @@ -340,10 +335,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -356,8 +352,6 @@ def get_next(next_link=None): return ItemPaged(get_next, 
extract_data) - list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.OperationalInsights/queryPacks"} - @distributed_trace def list_by_resource_group( self, resource_group_name: str, **kwargs: Any @@ -367,7 +361,6 @@ def list_by_resource_group( :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either LogAnalyticsQueryPack or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack] @@ -376,10 +369,10 @@ def list_by_resource_group( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) cls: ClsType[_models.LogAnalyticsQueryPackListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -390,23 +383,20 @@ def list_by_resource_group( def prepare_request(next_link=None): if not next_link: - request = build_list_by_resource_group_request( + _request = build_list_by_resource_group_request( resource_group_name=resource_group_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_resource_group.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("LogAnalyticsQueryPackListResult", pipeline_response) @@ -416,10 +406,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -432,10 +423,6 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list_by_resource_group.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks" - } - @overload def create_or_update_without_name( self, @@ -457,7 +444,6 @@ def create_or_update_without_name( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPack or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: @@ -467,7 +453,7 @@ def create_or_update_without_name( def create_or_update_without_name( self, resource_group_name: str, - log_analytics_query_pack_payload: IO, + log_analytics_query_pack_payload: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -480,11 +466,10 @@ def create_or_update_without_name( :type resource_group_name: str :param log_analytics_query_pack_payload: Properties that need to be specified to create or update a Log Analytics QueryPack. Required. - :type log_analytics_query_pack_payload: IO + :type log_analytics_query_pack_payload: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPack or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: @@ -494,7 +479,7 @@ def create_or_update_without_name( def create_or_update_without_name( self, resource_group_name: str, - log_analytics_query_pack_payload: Union[_models.LogAnalyticsQueryPack, IO], + log_analytics_query_pack_payload: Union[_models.LogAnalyticsQueryPack, IO[bytes]], **kwargs: Any ) -> _models.LogAnalyticsQueryPack: """Creates a Log Analytics QueryPack. Note: You cannot specify a different value for @@ -504,18 +489,15 @@ def create_or_update_without_name( Required. :type resource_group_name: str :param log_analytics_query_pack_payload: Properties that need to be specified to create or - update a Log Analytics QueryPack. Is either a model type or a IO type. Required. + update a Log Analytics QueryPack. Is either a LogAnalyticsQueryPack type or a IO[bytes] type. + Required. :type log_analytics_query_pack_payload: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack - or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + or IO[bytes] :return: LogAnalyticsQueryPack or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -526,34 +508,33 @@ def create_or_update_without_name( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPack] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(log_analytics_query_pack_payload, (IO, bytes)): + if isinstance(log_analytics_query_pack_payload, (IOBase, bytes)): _content = log_analytics_query_pack_payload else: _json = self._serialize.body(log_analytics_query_pack_payload, "LogAnalyticsQueryPack") - request = build_create_or_update_without_name_request( + _request = build_create_or_update_without_name_request( resource_group_name=resource_group_name, subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, content=_content, - template_url=self.create_or_update_without_name.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -563,16 +544,12 @@ def create_or_update_without_name( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response) + deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - create_or_update_without_name.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks" - } + return deserialized # type: ignore @distributed_trace def delete( # pylint: disable=inconsistent-return-statements @@ -585,12 +562,11 @@ def delete( # pylint: disable=inconsistent-return-statements :type resource_group_name: str :param query_pack_name: The name of the Log Analytics QueryPack resource. Required. 
:type query_pack_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -601,23 +577,22 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, query_pack_name=query_pack_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -628,11 +603,7 @@ def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}" - } + return cls(pipeline_response, None, {}) # type: ignore @distributed_trace def get(self, resource_group_name: str, query_pack_name: str, **kwargs: Any) -> _models.LogAnalyticsQueryPack: @@ -643,12 +614,11 @@ def get(self, resource_group_name: str, query_pack_name: str, **kwargs: Any) -> :type resource_group_name: str :param query_pack_name: The name of the Log Analytics QueryPack resource. Required. 
:type query_pack_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPack or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -659,23 +629,22 @@ def get(self, resource_group_name: str, query_pack_name: str, **kwargs: Any) -> _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) cls: ClsType[_models.LogAnalyticsQueryPack] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, query_pack_name=query_pack_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -685,16 +654,12 @@ def get(self, resource_group_name: str, query_pack_name: str, **kwargs: Any) -> error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response) + deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}" - } + return deserialized # type: ignore @overload def create_or_update( @@ -720,7 +685,6 @@ def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPack or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: @@ -731,7 +695,7 @@ def create_or_update( self, resource_group_name: str, query_pack_name: str, - log_analytics_query_pack_payload: IO, + log_analytics_query_pack_payload: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -746,11 +710,10 @@ def create_or_update( :type query_pack_name: str :param log_analytics_query_pack_payload: Properties that need to be specified to create or update a Log Analytics QueryPack. Required. - :type log_analytics_query_pack_payload: IO + :type log_analytics_query_pack_payload: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. 
Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPack or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: @@ -761,7 +724,7 @@ def create_or_update( self, resource_group_name: str, query_pack_name: str, - log_analytics_query_pack_payload: Union[_models.LogAnalyticsQueryPack, IO], + log_analytics_query_pack_payload: Union[_models.LogAnalyticsQueryPack, IO[bytes]], **kwargs: Any ) -> _models.LogAnalyticsQueryPack: """Creates (or updates) a Log Analytics QueryPack. Note: You cannot specify a different value for @@ -773,18 +736,15 @@ def create_or_update( :param query_pack_name: The name of the Log Analytics QueryPack resource. Required. :type query_pack_name: str :param log_analytics_query_pack_payload: Properties that need to be specified to create or - update a Log Analytics QueryPack. Is either a model type or a IO type. Required. + update a Log Analytics QueryPack. Is either a LogAnalyticsQueryPack type or a IO[bytes] type. + Required. :type log_analytics_query_pack_payload: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack - or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + or IO[bytes] :return: LogAnalyticsQueryPack or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -795,19 +755,19 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPack] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(log_analytics_query_pack_payload, (IO, bytes)): + if isinstance(log_analytics_query_pack_payload, (IOBase, bytes)): _content = log_analytics_query_pack_payload else: _json = self._serialize.body(log_analytics_query_pack_payload, "LogAnalyticsQueryPack") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, query_pack_name=query_pack_name, subscription_id=self._config.subscription_id, @@ -815,15 +775,14 @@ def create_or_update( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -833,16 +792,12 
@@ def create_or_update( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response) + deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}" - } + return deserialized # type: ignore @overload def update_tags( @@ -866,7 +821,6 @@ def update_tags( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPack or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: @@ -877,7 +831,7 @@ def update_tags( self, resource_group_name: str, query_pack_name: str, - query_pack_tags: IO, + query_pack_tags: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -890,11 +844,10 @@ def update_tags( :param query_pack_name: The name of the Log Analytics QueryPack resource. Required. :type query_pack_name: str :param query_pack_tags: Updated tag information to set into the QueryPack instance. Required. - :type query_pack_tags: IO + :type query_pack_tags: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: LogAnalyticsQueryPack or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: @@ -905,7 +858,7 @@ def update_tags( self, resource_group_name: str, query_pack_name: str, - query_pack_tags: Union[_models.TagsResource, IO], + query_pack_tags: Union[_models.TagsResource, IO[bytes]], **kwargs: Any ) -> _models.LogAnalyticsQueryPack: """Updates an existing QueryPack's tags. To update other fields use the CreateOrUpdate method. @@ -916,17 +869,13 @@ def update_tags( :param query_pack_name: The name of the Log Analytics QueryPack resource. Required. :type query_pack_name: str :param query_pack_tags: Updated tag information to set into the QueryPack instance. Is either a - model type or a IO type. Required. - :type query_pack_tags: ~azure.mgmt.loganalytics.models.TagsResource or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + TagsResource type or a IO[bytes] type. Required. 
+ :type query_pack_tags: ~azure.mgmt.loganalytics.models.TagsResource or IO[bytes] :return: LogAnalyticsQueryPack or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -937,19 +886,19 @@ def update_tags( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2019-09-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPack] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(query_pack_tags, (IO, bytes)): + if isinstance(query_pack_tags, (IOBase, bytes)): _content = query_pack_tags else: _json = self._serialize.body(query_pack_tags, "TagsResource") - request = build_update_tags_request( + _request = build_update_tags_request( resource_group_name=resource_group_name, query_pack_name=query_pack_name, subscription_id=self._config.subscription_id, @@ -957,15 +906,14 @@ def update_tags( content_type=content_type, json=_json, content=_content, - template_url=self.update_tags.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -975,13 +923,9 @@ def update_tags( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response) + deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - update_tags.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}" - } + return deserialized # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_saved_searches_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_saved_searches_operations.py index a919b44a9b62..95ff8c154536 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_saved_searches_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_saved_searches_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. 
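# A minimal usage sketch for the regenerated QueryPacks operations above, assuming the
# operation group is exposed as `query_packs` on LogAnalyticsManagementClient and that
# LogAnalyticsQueryPack only requires `location`; subscription and resource names are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.loganalytics import LogAnalyticsManagementClient
from azure.mgmt.loganalytics.models import LogAnalyticsQueryPack, TagsResource

client = LogAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")
# The payload may be either a LogAnalyticsQueryPack model or an IO[bytes] JSON stream.
pack = client.query_packs.create_or_update(
    "my-rg", "my-query-pack", LogAnalyticsQueryPack(location="eastus")
)
# Tags are patched separately via update_tags with a TagsResource body.
pack = client.query_packs.update_tags(
    "my-rg", "my-query-pack", TagsResource(tags={"env": "dev"})
)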
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload +from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -18,20 +19,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -44,7 +43,7 @@ def build_delete_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) # Construct URL _url = kwargs.pop( "template_url", @@ -66,7 +65,7 @@ def build_delete_request( "savedSearchId": _SERIALIZER.url("saved_search_id", saved_search_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -80,7 +79,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -105,7 +104,7 @@ def build_create_or_update_request( "savedSearchId": _SERIALIZER.url("saved_search_id", saved_search_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -124,7 +123,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -148,7 +147,7 
@@ def build_get_request( "savedSearchId": _SERIALIZER.url("saved_search_id", saved_search_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -165,7 +164,7 @@ def build_list_by_workspace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -188,7 +187,7 @@ def build_list_by_workspace_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -231,12 +230,11 @@ def delete( # pylint: disable=inconsistent-return-statements :type workspace_name: str :param saved_search_id: The id of the saved search. Required. :type saved_search_id: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -247,24 +245,23 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, saved_search_id=saved_search_id, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -274,11 +271,7 @@ def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/savedSearches/{savedSearchId}" - } + return cls(pipeline_response, None, {}) # type: ignore @overload def create_or_update( @@ -305,7 +298,6 @@ def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. 
Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: SavedSearch or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.SavedSearch :raises ~azure.core.exceptions.HttpResponseError: @@ -317,7 +309,7 @@ def create_or_update( resource_group_name: str, workspace_name: str, saved_search_id: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -332,11 +324,10 @@ def create_or_update( :param saved_search_id: The id of the saved search. Required. :type saved_search_id: str :param parameters: The parameters required to save a search. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: SavedSearch or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.SavedSearch :raises ~azure.core.exceptions.HttpResponseError: @@ -348,7 +339,7 @@ def create_or_update( resource_group_name: str, workspace_name: str, saved_search_id: str, - parameters: Union[_models.SavedSearch, IO], + parameters: Union[_models.SavedSearch, IO[bytes]], **kwargs: Any ) -> _models.SavedSearch: """Creates or updates a saved search for a given workspace. @@ -360,18 +351,14 @@ def create_or_update( :type workspace_name: str :param saved_search_id: The id of the saved search. Required. :type saved_search_id: str - :param parameters: The parameters required to save a search. Is either a model type or a IO - type. Required. - :type parameters: ~azure.mgmt.loganalytics.models.SavedSearch or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: The parameters required to save a search. Is either a SavedSearch type or a + IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.loganalytics.models.SavedSearch or IO[bytes] :return: SavedSearch or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.SavedSearch :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -382,19 +369,19 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.SavedSearch] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "SavedSearch") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, saved_search_id=saved_search_id, @@ -403,15 +390,14 @@ def create_or_update( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -420,16 +406,12 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SavedSearch", pipeline_response) + deserialized = self._deserialize("SavedSearch", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/savedSearches/{savedSearchId}" - } + return deserialized # type: ignore @distributed_trace def get( @@ -444,12 +426,11 @@ def get( :type workspace_name: str :param saved_search_id: The id of the saved search. Required. 
:type saved_search_id: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: SavedSearch or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.SavedSearch :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -460,24 +441,23 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.SavedSearch] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, workspace_name=workspace_name, saved_search_id=saved_search_id, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -486,16 +466,12 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SavedSearch", pipeline_response) + deserialized = self._deserialize("SavedSearch", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/savedSearches/{savedSearchId}" - } + return deserialized # type: ignore @distributed_trace def list_by_workspace( @@ -508,12 +484,11 @@ def list_by_workspace( :type resource_group_name: str :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: SavedSearchesListResult or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.SavedSearchesListResult :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -524,23 +499,22 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.SavedSearchesListResult] = kwargs.pop("cls", None) - request = build_list_by_workspace_request( + _request = build_list_by_workspace_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_workspace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -549,13 +523,9 @@ def list_by_workspace( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SavedSearchesListResult", pipeline_response) + deserialized = self._deserialize("SavedSearchesListResult", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - list_by_workspace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/savedSearches" - } + return deserialized # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_schema_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_schema_operations.py index e2377a538608..256ec008a860 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_schema_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_schema_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
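# A hypothetical sketch of the SavedSearches create_or_update overloads above: the body can be
# a SavedSearch model or an IO[bytes] stream of the JSON payload. It assumes the operation group
# is exposed as `saved_searches` and that SavedSearch requires category, display_name and query;
# all resource names and the payload file are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.loganalytics import LogAnalyticsManagementClient
from azure.mgmt.loganalytics.models import SavedSearch

client = LogAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")
search = client.saved_searches.create_or_update(
    "my-rg",
    "my-workspace",
    "my-saved-search-id",
    SavedSearch(category="Examples", display_name="Heartbeats", query="Heartbeat | take 10"),
)
# Equivalent call with a raw JSON payload; content_type defaults to "application/json".
with open("saved_search.json", "rb") as payload:
    search = client.saved_searches.create_or_update(
        "my-rg", "my-workspace", "my-saved-search-id", payload
    )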
# -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, Optional, TypeVar +from typing import Any, Callable, Dict, Optional, Type, TypeVar from azure.core.exceptions import ( ClientAuthenticationError, @@ -18,20 +18,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -45,7 +43,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -68,7 +66,7 @@ def build_get_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -107,12 +105,11 @@ def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _ :type resource_group_name: str :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: SearchGetSchemaResponse or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.SearchGetSchemaResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -123,23 +120,22 @@ def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.SearchGetSchemaResponse] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -148,13 +144,9 @@ def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _ map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SearchGetSchemaResponse", pipeline_response) + deserialized = self._deserialize("SearchGetSchemaResponse", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/schema" - } + return deserialized # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_shared_keys_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_shared_keys_operations.py index f31c34dffafc..46f987e354be 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_shared_keys_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_shared_keys_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
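# A brief sketch of the schema operation above, assuming the operation group is exposed as
# `schema` on the client; it returns a SearchGetSchemaResponse for the workspace.
from azure.identity import DefaultAzureCredential
from azure.mgmt.loganalytics import LogAnalyticsManagementClient

client = LogAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")
schema = client.schema.get("my-rg", "my-workspace")
print(schema.metadata)  # `metadata` attribute assumed present on SearchGetSchemaResponse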
# -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, Optional, TypeVar +from typing import Any, Callable, Dict, Optional, Type, TypeVar from azure.core.exceptions import ( ClientAuthenticationError, @@ -18,20 +18,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -45,7 +43,7 @@ def build_get_shared_keys_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -68,7 +66,7 @@ def build_get_shared_keys_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -85,7 +83,7 @@ def build_regenerate_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -108,7 +106,7 @@ def build_regenerate_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -147,12 +145,11 @@ def get_shared_keys(self, resource_group_name: str, workspace_name: str, **kwarg :type resource_group_name: str :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: SharedKeys or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.SharedKeys :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -163,23 +160,22 @@ def get_shared_keys(self, resource_group_name: str, workspace_name: str, **kwarg _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.SharedKeys] = kwargs.pop("cls", None) - request = build_get_shared_keys_request( + _request = build_get_shared_keys_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get_shared_keys.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -188,16 +184,12 @@ def get_shared_keys(self, resource_group_name: str, workspace_name: str, **kwarg map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SharedKeys", pipeline_response) + deserialized = self._deserialize("SharedKeys", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - get_shared_keys.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/sharedKeys" - } + return deserialized # type: ignore @distributed_trace def regenerate(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _models.SharedKeys: @@ -209,12 +201,11 @@ def regenerate(self, resource_group_name: str, workspace_name: str, **kwargs: An :type resource_group_name: str :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: SharedKeys or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.SharedKeys :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -225,23 +216,22 @@ def regenerate(self, resource_group_name: str, workspace_name: str, **kwargs: An _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.SharedKeys] = kwargs.pop("cls", None) - request = build_regenerate_request( + _request = build_regenerate_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.regenerate.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -250,13 +240,9 @@ def regenerate(self, resource_group_name: str, workspace_name: str, **kwargs: An map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SharedKeys", pipeline_response) + deserialized = self._deserialize("SharedKeys", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - regenerate.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/regenerateSharedKey" - } + return deserialized # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_storage_insight_configs_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_storage_insight_configs_operations.py index 0b1230942419..3ccf5387e0b1 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_storage_insight_configs_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_storage_insight_configs_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
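# A minimal sketch of the shared keys operations above (get_shared_keys / regenerate),
# assuming the group is exposed as `shared_keys` and that the SharedKeys model carries
# primary_shared_key / secondary_shared_key attributes; names are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.loganalytics import LogAnalyticsManagementClient

client = LogAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")
keys = client.shared_keys.get_shared_keys("my-rg", "my-workspace")
print(keys.primary_shared_key)
# regenerate returns a refreshed SharedKeys object for the workspace.
keys = client.shared_keys.regenerate("my-rg", "my-workspace")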
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -19,20 +20,18 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -46,7 +45,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -71,7 +70,7 @@ def build_create_or_update_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -90,7 +89,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -114,7 +113,7 @@ def build_get_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -130,7 +129,7 @@ def build_delete_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) # Construct URL _url = kwargs.pop( "template_url", @@ -152,7 +151,7 @@ def 
build_delete_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -166,7 +165,7 @@ def build_list_by_workspace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -189,7 +188,7 @@ def build_list_by_workspace_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -244,7 +243,6 @@ def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: StorageInsight or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.StorageInsight :raises ~azure.core.exceptions.HttpResponseError: @@ -256,7 +254,7 @@ def create_or_update( resource_group_name: str, workspace_name: str, storage_insight_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -271,11 +269,10 @@ def create_or_update( :param storage_insight_name: Name of the storageInsightsConfigs resource. Required. :type storage_insight_name: str :param parameters: The parameters required to create or update a storage insight. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: StorageInsight or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.StorageInsight :raises ~azure.core.exceptions.HttpResponseError: @@ -287,7 +284,7 @@ def create_or_update( resource_group_name: str, workspace_name: str, storage_insight_name: str, - parameters: Union[_models.StorageInsight, IO], + parameters: Union[_models.StorageInsight, IO[bytes]], **kwargs: Any ) -> _models.StorageInsight: """Create or update a storage insight. @@ -300,17 +297,13 @@ def create_or_update( :param storage_insight_name: Name of the storageInsightsConfigs resource. Required. :type storage_insight_name: str :param parameters: The parameters required to create or update a storage insight. Is either a - model type or a IO type. Required. - :type parameters: ~azure.mgmt.loganalytics.models.StorageInsight or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + StorageInsight type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.loganalytics.models.StorageInsight or IO[bytes] :return: StorageInsight or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.StorageInsight :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -321,19 +314,19 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.StorageInsight] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "StorageInsight") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, storage_insight_name=storage_insight_name, @@ -342,15 +335,14 @@ def create_or_update( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -359,21 +351,13 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("StorageInsight", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("StorageInsight", pipeline_response) + deserialized = self._deserialize("StorageInsight", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/storageInsightConfigs/{storageInsightName}" - } - @distributed_trace def get( self, resource_group_name: str, workspace_name: str, storage_insight_name: str, **kwargs: Any @@ -387,12 +371,11 @@ def get( :type workspace_name: str :param storage_insight_name: Name of the storageInsightsConfigs resource. Required. 
:type storage_insight_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: StorageInsight or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.StorageInsight :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -403,24 +386,23 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.StorageInsight] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, workspace_name=workspace_name, storage_insight_name=storage_insight_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -429,16 +411,12 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("StorageInsight", pipeline_response) + deserialized = self._deserialize("StorageInsight", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/storageInsightConfigs/{storageInsightName}" - } + return deserialized # type: ignore @distributed_trace def delete( # pylint: disable=inconsistent-return-statements @@ -453,12 +431,11 @@ def delete( # pylint: disable=inconsistent-return-statements :type workspace_name: str :param storage_insight_name: Name of the storageInsightsConfigs resource. Required. 
:type storage_insight_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -469,24 +446,23 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, storage_insight_name=storage_insight_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -496,11 +472,7 @@ def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/storageInsightConfigs/{storageInsightName}" - } + return cls(pipeline_response, None, {}) # type: ignore @distributed_trace def list_by_workspace( @@ -513,7 +485,6 @@ def list_by_workspace( :type resource_group_name: str :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either StorageInsight or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.loganalytics.models.StorageInsight] :raises ~azure.core.exceptions.HttpResponseError: @@ -521,10 +492,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.StorageInsightListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -535,24 +506,21 @@ def list_by_workspace( def prepare_request(next_link=None): if not next_link: - request = build_list_by_workspace_request( + _request = build_list_by_workspace_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_workspace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("StorageInsightListResult", pipeline_response) @@ -562,10 +530,11 @@ def extract_data(pipeline_response): return deserialized.odata_next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -576,7 +545,3 @@ def get_next(next_link=None): return pipeline_response return ItemPaged(get_next, extract_data) - - list_by_workspace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/storageInsightConfigs" - } diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_tables_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_tables_operations.py index f599d48e8ecf..289ff15d3bf1 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_tables_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_tables_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. 
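# --- Illustrative usage (editor's note, not part of the generated diff) ---
# A minimal sketch of how the StorageInsightConfigs operations touched above are
# called from user code after this regeneration. The subscription id, resource
# group, workspace, and storage-insight names below are placeholders, not values
# from this PR; the public signatures themselves are unchanged by the internal
# request-building rewrite.
from azure.identity import DefaultAzureCredential
from azure.mgmt.loganalytics import LogAnalyticsManagementClient

client = LogAnalyticsManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
)

# get() still returns a StorageInsight model; dropping the *.metadata url
# attributes is an internal change only.
insight = client.storage_insight_configs.get(
    resource_group_name="my-rg",              # assumed resource group
    workspace_name="my-workspace",            # assumed workspace
    storage_insight_name="my-storage-insight",
)
print(insight.name)

# list_by_workspace() keeps returning an ItemPaged iterator.
for item in client.storage_insight_configs.list_by_workspace("my-rg", "my-workspace"):
    print(item.name)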
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -15,13 +16,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -29,12 +31,11 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -48,7 +49,7 @@ def build_list_by_workspace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -71,7 +72,7 @@ def build_list_by_workspace_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -88,7 +89,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -113,7 +114,7 @@ def build_create_or_update_request( "tableName": _SERIALIZER.url("table_name", table_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -132,7 +133,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or 
{}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -157,7 +158,7 @@ def build_update_request( "tableName": _SERIALIZER.url("table_name", table_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -176,7 +177,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -200,7 +201,7 @@ def build_get_request( "tableName": _SERIALIZER.url("table_name", table_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -217,7 +218,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -241,7 +242,7 @@ def build_delete_request( "tableName": _SERIALIZER.url("table_name", table_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -258,7 +259,7 @@ def build_migrate_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -282,7 +283,7 @@ def build_migrate_request( "tableName": _SERIALIZER.url("table_name", table_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -299,7 +300,7 @@ def build_cancel_search_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -323,7 
+324,7 @@ def build_cancel_search_request( "tableName": _SERIALIZER.url("table_name", table_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -364,7 +365,6 @@ def list_by_workspace( :type resource_group_name: str :param workspace_name: The name of the workspace. Required. :type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Table or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.loganalytics.models.Table] :raises ~azure.core.exceptions.HttpResponseError: @@ -372,10 +372,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[_models.TablesListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -386,24 +386,21 @@ def list_by_workspace( def prepare_request(next_link=None): if not next_link: - request = build_list_by_workspace_request( + _request = build_list_by_workspace_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_workspace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("TablesListResult", pipeline_response) @@ -413,10 +410,11 @@ def extract_data(pipeline_response): return None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -429,19 +427,15 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list_by_workspace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables" - } - def _create_or_update_initial( self, resource_group_name: str, workspace_name: str, table_name: str, - parameters: Union[_models.Table, IO], + parameters: Union[_models.Table, IO[bytes]], **kwargs: Any - ) -> Optional[_models.Table]: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -452,19 +446,19 
@@ def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.Table]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "Table") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, table_name=table_name, @@ -473,36 +467,34 @@ def _create_or_update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._create_or_update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("Table", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - _create_or_update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}" - } + return deserialized # type: ignore @overload def begin_create_or_update( @@ -529,14 +521,6 @@ def begin_create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either Table or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.loganalytics.models.Table] :raises ~azure.core.exceptions.HttpResponseError: @@ -548,7 +532,7 @@ def begin_create_or_update( resource_group_name: str, workspace_name: str, table_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -563,18 +547,10 @@ def begin_create_or_update( :param table_name: The name of the table. Required. :type table_name: str :param parameters: The parameters required to update table properties. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either Table or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.loganalytics.models.Table] :raises ~azure.core.exceptions.HttpResponseError: @@ -586,7 +562,7 @@ def begin_create_or_update( resource_group_name: str, workspace_name: str, table_name: str, - parameters: Union[_models.Table, IO], + parameters: Union[_models.Table, IO[bytes]], **kwargs: Any ) -> LROPoller[_models.Table]: """Update or Create a Log Analytics workspace table. @@ -598,20 +574,9 @@ def begin_create_or_update( :type workspace_name: str :param table_name: The name of the table. Required. :type table_name: str - :param parameters: The parameters required to update table properties. Is either a model type - or a IO type. Required. - :type parameters: ~azure.mgmt.loganalytics.models.Table or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: The parameters required to update table properties. Is either a Table type + or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.loganalytics.models.Table or IO[bytes] :return: An instance of LROPoller that returns either Table or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.loganalytics.models.Table] :raises ~azure.core.exceptions.HttpResponseError: @@ -619,7 +584,7 @@ def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Table] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -638,12 +603,13 @@ def begin_create_or_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("Table", pipeline_response) + deserialized = self._deserialize("Table", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -655,27 +621,25 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[_models.Table].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}" - } + return LROPoller[_models.Table]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) def _update_initial( self, resource_group_name: str, workspace_name: str, table_name: str, - parameters: Union[_models.Table, IO], + parameters: Union[_models.Table, IO[bytes]], **kwargs: Any - ) -> Optional[_models.Table]: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -686,19 +650,19 @@ def _update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.Table]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "Table") - request = build_update_request( + _request = build_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, 
table_name=table_name, @@ -707,36 +671,34 @@ def _update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("Table", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - _update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}" - } + return deserialized # type: ignore @overload def begin_update( @@ -763,14 +725,6 @@ def begin_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either Table or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.loganalytics.models.Table] :raises ~azure.core.exceptions.HttpResponseError: @@ -782,7 +736,7 @@ def begin_update( resource_group_name: str, workspace_name: str, table_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -797,18 +751,10 @@ def begin_update( :param table_name: The name of the table. Required. :type table_name: str :param parameters: The parameters required to update table properties. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either Table or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.loganalytics.models.Table] :raises ~azure.core.exceptions.HttpResponseError: @@ -820,7 +766,7 @@ def begin_update( resource_group_name: str, workspace_name: str, table_name: str, - parameters: Union[_models.Table, IO], + parameters: Union[_models.Table, IO[bytes]], **kwargs: Any ) -> LROPoller[_models.Table]: """Update a Log Analytics workspace table. @@ -832,20 +778,9 @@ def begin_update( :type workspace_name: str :param table_name: The name of the table. Required. :type table_name: str - :param parameters: The parameters required to update table properties. Is either a model type - or a IO type. Required. - :type parameters: ~azure.mgmt.loganalytics.models.Table or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: The parameters required to update table properties. Is either a Table type + or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.loganalytics.models.Table or IO[bytes] :return: An instance of LROPoller that returns either Table or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.loganalytics.models.Table] :raises ~azure.core.exceptions.HttpResponseError: @@ -853,7 +788,7 @@ def begin_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Table] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -872,12 +807,13 @@ def begin_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("Table", pipeline_response) + deserialized = self._deserialize("Table", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -889,17 +825,15 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[_models.Table].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}" - } + return LROPoller[_models.Table]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) @distributed_trace def get(self, resource_group_name: str, workspace_name: str, table_name: str, **kwargs: Any) -> _models.Table: @@ -912,12 +846,11 @@ def get(self, resource_group_name: str, workspace_name: str, table_name: str, ** :type workspace_name: str :param table_name: The name of the table. Required. 
:type table_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Table or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.Table :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -928,24 +861,23 @@ def get(self, resource_group_name: str, workspace_name: str, table_name: str, ** _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[_models.Table] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, workspace_name=workspace_name, table_name=table_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -955,21 +887,17 @@ def get(self, resource_group_name: str, workspace_name: str, table_name: str, ** error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("Table", pipeline_response) + deserialized = self._deserialize("Table", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}" - } + return deserialized # type: ignore - def _delete_initial( # pylint: disable=inconsistent-return-statements + def _delete_initial( self, resource_group_name: str, workspace_name: str, table_name: str, **kwargs: Any - ) -> None: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -980,39 +908,43 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) - cls: ClsType[None] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, table_name=table_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata["url"], headers=_headers, 
params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _delete_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}" - } + return deserialized # type: ignore @distributed_trace def begin_delete( @@ -1027,14 +959,6 @@ def begin_delete( :type workspace_name: str :param table_name: The name of the table. Required. :type table_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -1042,13 +966,13 @@ def begin_delete( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( # type: ignore + raw_result = self._delete_initial( resource_group_name=resource_group_name, workspace_name=workspace_name, table_name=table_name, @@ -1058,11 +982,12 @@ def begin_delete( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: PollingMethod = cast( @@ -1073,17 +998,13 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}" - } + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore @distributed_trace def migrate( # pylint: disable=inconsistent-return-statements @@ -1099,12 +1020,11 @@ def migrate( # pylint: disable=inconsistent-return-statements :type workspace_name: str :param table_name: The name of the table. Required. 
:type table_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1115,24 +1035,23 @@ def migrate( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_migrate_request( + _request = build_migrate_request( resource_group_name=resource_group_name, workspace_name=workspace_name, table_name=table_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.migrate.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1143,11 +1062,7 @@ def migrate( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - migrate.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}/migrate" - } + return cls(pipeline_response, None, {}) # type: ignore @distributed_trace def cancel_search( # pylint: disable=inconsistent-return-statements @@ -1162,12 +1077,11 @@ def cancel_search( # pylint: disable=inconsistent-return-statements :type workspace_name: str :param table_name: The name of the table. Required. 
:type table_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1178,24 +1092,23 @@ def cancel_search( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_cancel_search_request( + _request = build_cancel_search_request( resource_group_name=resource_group_name, workspace_name=workspace_name, table_name=table_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.cancel_search.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1206,8 +1119,4 @@ def cancel_search( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - cancel_search.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}/cancelSearch" - } + return cls(pipeline_response, None, {}) # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_usages_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_usages_operations.py index 5502401d84c0..8463ceba961f 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_usages_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_usages_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
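# --- Illustrative usage (editor's note, not part of the generated diff) ---
# A minimal sketch of the tables long-running operations whose internals change
# above: begin_create_or_update/begin_update accept either a Table model or an
# IO[bytes] stream and now return a typed LROPoller[Table]. Resource names are
# placeholders, and the retention properties on Table are assumed from the
# 2022-10-01 surface rather than shown in this hunk.
from azure.identity import DefaultAzureCredential
from azure.mgmt.loganalytics import LogAnalyticsManagementClient
from azure.mgmt.loganalytics.models import Table

client = LogAnalyticsManagementClient(
    DefaultAzureCredential(), "00000000-0000-0000-0000-000000000000"  # placeholder
)

poller = client.tables.begin_update(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    table_name="MyCustomTable_CL",  # assumed custom table name
    parameters=Table(retention_in_days=30, total_retention_in_days=90),  # assumed properties
)
table = poller.result()  # LROPoller[Table].result() yields the deserialized Table
print(table.name, table.retention_in_days)

# The same operation also accepts a raw JSON byte stream (IO[bytes]):
import io
import json

payload = io.BytesIO(json.dumps({"properties": {"retentionInDays": 30}}).encode("utf-8"))
client.tables.begin_update(
    "my-rg", "my-workspace", "MyCustomTable_CL", payload, content_type="application/json"
).result()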
# -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar +from typing import Any, Callable, Dict, Iterable, Optional, Type, TypeVar from azure.core.exceptions import ( ClientAuthenticationError, @@ -19,20 +19,18 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -46,7 +44,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -69,7 +67,7 @@ def build_list_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -108,7 +106,6 @@ def list(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> :type resource_group_name: str :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either UsageMetric or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.loganalytics.models.UsageMetric] :raises ~azure.core.exceptions.HttpResponseError: @@ -116,10 +113,10 @@ def list(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.WorkspaceListUsagesResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -130,24 +127,21 @@ def list(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("WorkspaceListUsagesResult", pipeline_response) @@ -157,10 +151,11 @@ def extract_data(pipeline_response): return None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -171,7 +166,3 @@ def get_next(next_link=None): return pipeline_response return ItemPaged(get_next, extract_data) - - list.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/usages" - } diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_workspace_purge_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_workspace_purge_operations.py index 096aeaeedbdb..55bb9b7f7dba 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_workspace_purge_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_workspace_purge_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
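# --- Illustrative usage (editor's note, not part of the generated diff) ---
# A small sketch of the public paging surface behind the usages changes above:
# list() still returns an ItemPaged of UsageMetric; only the internal request
# preparation changed. Resource names are placeholders, and the UsageMetric
# attributes printed here are assumed from the 2020-08-01 surface.
from azure.identity import DefaultAzureCredential
from azure.mgmt.loganalytics import LogAnalyticsManagementClient

client = LogAnalyticsManagementClient(
    DefaultAzureCredential(), "00000000-0000-0000-0000-000000000000"  # placeholder
)
for metric in client.usages.list(resource_group_name="my-rg", workspace_name="my-workspace"):
    name = metric.name.value if metric.name else None
    print(name, metric.current_value, metric.unit)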
@@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload +from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -18,20 +19,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -45,7 +44,7 @@ def build_purge_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -69,7 +68,7 @@ def build_purge_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -88,7 +87,7 @@ def build_get_purge_status_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -112,7 +111,7 @@ def build_get_purge_status_request( "purgeId": _SERIALIZER.url("purge_id", purge_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -174,7 +173,6 @@ def purge( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: WorkspacePurgeResponse or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.WorkspacePurgeResponse :raises ~azure.core.exceptions.HttpResponseError: @@ -185,7 +183,7 @@ def purge( self, resource_group_name: str, workspace_name: str, - body: IO, + body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -208,11 +206,10 @@ def purge( :type workspace_name: str :param body: Describes the body of a request to purge data in a single table of an Log Analytics Workspace. Required. - :type body: IO + :type body: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: WorkspacePurgeResponse or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.WorkspacePurgeResponse :raises ~azure.core.exceptions.HttpResponseError: @@ -220,7 +217,11 @@ def purge( @distributed_trace def purge( - self, resource_group_name: str, workspace_name: str, body: Union[_models.WorkspacePurgeBody, IO], **kwargs: Any + self, + resource_group_name: str, + workspace_name: str, + body: Union[_models.WorkspacePurgeBody, IO[bytes]], + **kwargs: Any ) -> _models.WorkspacePurgeResponse: """Purges data in an Log Analytics workspace by a set of user-defined filters. @@ -239,17 +240,13 @@ def purge( :param workspace_name: The name of the workspace. Required. :type workspace_name: str :param body: Describes the body of a request to purge data in a single table of an Log - Analytics Workspace. Is either a model type or a IO type. Required. - :type body: ~azure.mgmt.loganalytics.models.WorkspacePurgeBody or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + Analytics Workspace. Is either a WorkspacePurgeBody type or a IO[bytes] type. Required. 
+ :type body: ~azure.mgmt.loganalytics.models.WorkspacePurgeBody or IO[bytes] :return: WorkspacePurgeResponse or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.WorkspacePurgeResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -260,19 +257,19 @@ def purge( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.WorkspacePurgeResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(body, (IO, bytes)): + if isinstance(body, (IOBase, bytes)): _content = body else: _json = self._serialize.body(body, "WorkspacePurgeBody") - request = build_purge_request( + _request = build_purge_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, @@ -280,15 +277,14 @@ def purge( content_type=content_type, json=_json, content=_content, - template_url=self.purge.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -302,16 +298,12 @@ def purge( "str", response.headers.get("x-ms-status-location") ) - deserialized = self._deserialize("WorkspacePurgeResponse", pipeline_response) + deserialized = self._deserialize("WorkspacePurgeResponse", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - - purge.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/purge" - } + return deserialized # type: ignore @distributed_trace def get_purge_status( @@ -327,12 +319,11 @@ def get_purge_status( :param purge_id: In a purge status request, this is the Id of the operation the status of which is returned. Required. 
:type purge_id: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: WorkspacePurgeStatusResponse or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.WorkspacePurgeStatusResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -343,24 +334,23 @@ def get_purge_status( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2020-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) cls: ClsType[_models.WorkspacePurgeStatusResponse] = kwargs.pop("cls", None) - request = build_get_purge_status_request( + _request = build_get_purge_status_request( resource_group_name=resource_group_name, workspace_name=workspace_name, purge_id=purge_id, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get_purge_status.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -369,13 +359,9 @@ def get_purge_status( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("WorkspacePurgeStatusResponse", pipeline_response) + deserialized = self._deserialize("WorkspacePurgeStatusResponse", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - get_purge_status.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/operations/{purgeId}" - } + return deserialized # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_workspaces_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_workspaces_operations.py index fbba9982cba4..af941812437c 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_workspaces_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_workspaces_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
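For readers of the purge hunks above: the regenerated operations keep the same call shape, so a caller can pass either a WorkspacePurgeBody model or a raw IO[bytes] payload and then poll the returned operation id. A minimal sketch follows; the resource names, filter values, and the `workspace_purge` attribute name (taken from the operations module naming) are assumptions, not values from this patch.

from azure.identity import DefaultAzureCredential
from azure.mgmt.loganalytics import LogAnalyticsManagementClient
from azure.mgmt.loganalytics.models import WorkspacePurgeBody, WorkspacePurgeBodyFilters

client = LogAnalyticsManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",  # placeholder
)

# Request a purge of Heartbeat rows older than a cutoff; table and filter are illustrative.
purge_response = client.workspace_purge.purge(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    body=WorkspacePurgeBody(
        table="Heartbeat",
        filters=[
            WorkspacePurgeBodyFilters(column="TimeGenerated", operator="<", value="2023-01-01T00:00:00Z")
        ],
    ),
)

# The response carries the operation id to poll for completion.
status = client.workspace_purge.get_purge_status(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    purge_id=purge_response.operation_id,
)
print(status.status)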
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -15,13 +16,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -29,12 +31,11 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -46,7 +47,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -57,7 +58,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -72,7 +73,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -87,7 +88,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -104,7 +105,7 @@ def 
build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -128,7 +129,7 @@ def build_create_or_update_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -147,7 +148,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -170,7 +171,7 @@ def build_delete_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -189,7 +190,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -212,7 +213,7 @@ def build_get_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -229,7 +230,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -253,7 +254,7 @@ def build_update_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -289,7 +290,6 @@ def __init__(self, *args, **kwargs): def list(self, **kwargs: Any) -> 
Iterable["_models.Workspace"]: """Gets the workspaces in a subscription. - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Workspace or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.loganalytics.models.Workspace] :raises ~azure.core.exceptions.HttpResponseError: @@ -297,10 +297,10 @@ def list(self, **kwargs: Any) -> Iterable["_models.Workspace"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -311,22 +311,19 @@ def list(self, **kwargs: Any) -> Iterable["_models.Workspace"]: def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("WorkspaceListResult", pipeline_response) @@ -336,10 +333,11 @@ def extract_data(pipeline_response): return None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -352,8 +350,6 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.OperationalInsights/workspaces"} - @distributed_trace def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.Workspace"]: """Gets workspaces in a resource group. @@ -361,7 +357,6 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite :param resource_group_name: The name of the resource group. The name is case insensitive. Required. 
:type resource_group_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Workspace or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.loganalytics.models.Workspace] :raises ~azure.core.exceptions.HttpResponseError: @@ -369,10 +364,10 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -383,23 +378,20 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite def prepare_request(next_link=None): if not next_link: - request = build_list_by_resource_group_request( + _request = build_list_by_resource_group_request( resource_group_name=resource_group_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_resource_group.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request = HttpRequest("GET", next_link) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("WorkspaceListResult", pipeline_response) @@ -409,10 +401,11 @@ def extract_data(pipeline_response): return None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -425,14 +418,14 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list_by_resource_group.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces" - } - def _create_or_update_initial( - self, resource_group_name: str, workspace_name: str, parameters: Union[_models.Workspace, IO], **kwargs: Any - ) -> Optional[_models.Workspace]: - error_map = { + self, + resource_group_name: str, + workspace_name: str, + parameters: Union[_models.Workspace, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -443,19 +436,19 @@ def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.Workspace]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "Workspace") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, @@ -463,39 +456,34 @@ def _create_or_update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._create_or_update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 201, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("Workspace", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("Workspace", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - _create_or_update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}" - } + return deserialized # type: ignore @overload def begin_create_or_update( @@ -519,14 +507,6 @@ def begin_create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either Workspace or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.loganalytics.models.Workspace] :raises ~azure.core.exceptions.HttpResponseError: @@ -537,7 +517,7 @@ def begin_create_or_update( self, resource_group_name: str, workspace_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -550,18 +530,10 @@ def begin_create_or_update( :param workspace_name: The name of the workspace. Required. :type workspace_name: str :param parameters: The parameters required to create or update a workspace. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either Workspace or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.loganalytics.models.Workspace] :raises ~azure.core.exceptions.HttpResponseError: @@ -569,7 +541,11 @@ def begin_create_or_update( @distributed_trace def begin_create_or_update( - self, resource_group_name: str, workspace_name: str, parameters: Union[_models.Workspace, IO], **kwargs: Any + self, + resource_group_name: str, + workspace_name: str, + parameters: Union[_models.Workspace, IO[bytes]], + **kwargs: Any ) -> LROPoller[_models.Workspace]: """Create or update a workspace. @@ -578,20 +554,9 @@ def begin_create_or_update( :type resource_group_name: str :param workspace_name: The name of the workspace. Required. :type workspace_name: str - :param parameters: The parameters required to create or update a workspace. Is either a model - type or a IO type. Required. - :type parameters: ~azure.mgmt.loganalytics.models.Workspace or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: The parameters required to create or update a workspace. Is either a + Workspace type or a IO[bytes] type. Required. 
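As a usage note on the create-or-update hunks above: begin_create_or_update still returns an LROPoller[Workspace] whether the body is a Workspace model or an IO[bytes] stream. A hedged sketch under assumed placeholder names, an assumed PerGB2018 SKU, and the flattened Workspace properties (location, sku, retention_in_days) used by this SDK's models:

from azure.identity import DefaultAzureCredential
from azure.mgmt.loganalytics import LogAnalyticsManagementClient
from azure.mgmt.loganalytics.models import Workspace, WorkspaceSku

client = LogAnalyticsManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",  # placeholder
)

# Start the long-running create/update and block until the workspace is provisioned.
poller = client.workspaces.begin_create_or_update(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    parameters=Workspace(
        location="eastus",                 # illustrative region
        sku=WorkspaceSku(name="PerGB2018"),
        retention_in_days=30,
    ),
)
workspace = poller.result()
print(workspace.provisioning_state)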
+ :type parameters: ~azure.mgmt.loganalytics.models.Workspace or IO[bytes] :return: An instance of LROPoller that returns either Workspace or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.loganalytics.models.Workspace] :raises ~azure.core.exceptions.HttpResponseError: @@ -599,7 +564,7 @@ def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -617,12 +582,13 @@ def begin_create_or_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("Workspace", pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -632,22 +598,20 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[_models.Workspace].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}" - } + return LROPoller[_models.Workspace]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) - def _delete_initial( # pylint: disable=inconsistent-return-statements + def _delete_initial( self, resource_group_name: str, workspace_name: str, force: Optional[bool] = None, **kwargs: Any - ) -> None: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -658,39 +622,43 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) - cls: ClsType[None] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, force=force, api_version=api_version, - template_url=self._delete_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = 
kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _delete_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}" - } + return deserialized # type: ignore @distributed_trace def begin_delete( @@ -709,14 +677,6 @@ def begin_delete( :param force: Deletes the workspace without the recovery option. A workspace that was deleted with this flag cannot be recovered. Default value is None. :type force: bool - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -724,13 +684,13 @@ def begin_delete( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( # type: ignore + raw_result = self._delete_initial( resource_group_name=resource_group_name, workspace_name=workspace_name, force=force, @@ -740,11 +700,12 @@ def begin_delete( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) @@ -753,17 +714,13 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}" - } + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore @distributed_trace def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _models.Workspace: @@ -774,12 +731,11 @@ def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _ :type resource_group_name: str :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Workspace or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.Workspace :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -790,23 +746,22 @@ def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -816,16 +771,12 @@ def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("Workspace", pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}" - } + return deserialized # type: ignore @overload def update( @@ -849,7 +800,6 @@ def update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Workspace or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.Workspace :raises ~azure.core.exceptions.HttpResponseError: @@ -860,7 +810,7 @@ def update( self, resource_group_name: str, workspace_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -873,11 +823,10 @@ def update( :param workspace_name: The name of the workspace. Required. :type workspace_name: str :param parameters: The parameters required to patch a workspace. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Workspace or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.Workspace :raises ~azure.core.exceptions.HttpResponseError: @@ -888,7 +837,7 @@ def update( self, resource_group_name: str, workspace_name: str, - parameters: Union[_models.WorkspacePatch, IO], + parameters: Union[_models.WorkspacePatch, IO[bytes]], **kwargs: Any ) -> _models.Workspace: """Updates a workspace. @@ -898,18 +847,14 @@ def update( :type resource_group_name: str :param workspace_name: The name of the workspace. Required. :type workspace_name: str - :param parameters: The parameters required to patch a workspace. Is either a model type or a IO - type. Required. - :type parameters: ~azure.mgmt.loganalytics.models.WorkspacePatch or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: The parameters required to patch a workspace. Is either a WorkspacePatch + type or a IO[bytes] type. Required. + :type parameters: ~azure.mgmt.loganalytics.models.WorkspacePatch or IO[bytes] :return: Workspace or the result of cls(response) :rtype: ~azure.mgmt.loganalytics.models.Workspace :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -920,19 +865,19 @@ def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "WorkspacePatch") - request = build_update_request( + _request = build_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, @@ -940,15 +885,14 @@ def update( content_type=content_type, json=_json, content=_content, - template_url=self.update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -958,13 +902,9 @@ def update( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("Workspace", pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - 
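For the patch path shown above, update() is synchronous (no poller) and likewise accepts either a WorkspacePatch model or IO[bytes]. A minimal sketch; the tag and retention values are placeholders, and the flattened WorkspacePatch fields are assumed from this SDK's model layer.

from azure.identity import DefaultAzureCredential
from azure.mgmt.loganalytics import LogAnalyticsManagementClient
from azure.mgmt.loganalytics.models import WorkspacePatch

client = LogAnalyticsManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",  # placeholder
)

# Patch only the fields that should change; everything else on the workspace is left as-is.
updated = client.workspaces.update(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    parameters=WorkspacePatch(tags={"environment": "dev"}, retention_in_days=60),
)
print(updated.retention_in_days)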
return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}" - } + return deserialized # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/dev_requirements.txt b/sdk/loganalytics/azure-mgmt-loganalytics/dev_requirements.txt index e06c927d8089..eea05d9a0cb1 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/dev_requirements.txt +++ b/sdk/loganalytics/azure-mgmt-loganalytics/dev_requirements.txt @@ -1,3 +1,4 @@ -e ../../../tools/azure-sdk-tools -e ../../resources/azure-mgmt-resource -../../identity/azure-identity \ No newline at end of file +../../identity/azure-identity +aiohttp \ No newline at end of file diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_create.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_create.py index 875f382a290e..3d9027cdcd17 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_create.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_create.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ @@ -26,7 +27,7 @@ def main(): client = LogAnalyticsManagementClient( credential=DefaultAzureCredential(), - subscription_id="00000000-0000-0000-0000-00000000000", + subscription_id="53bc36c5-91e1-4d09-92c9-63b89e571926", ) response = client.clusters.begin_create_or_update( @@ -41,6 +42,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2021-06-01/examples/ClustersCreate.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/ClustersCreate.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_delete.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_delete.py index 90d70f176863..f6386e52f64b 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_delete.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_delete.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ @@ -26,16 +27,15 @@ def main(): client = LogAnalyticsManagementClient( credential=DefaultAzureCredential(), - subscription_id="00000000-0000-0000-0000-00000000000", + subscription_id="53bc36c5-91e1-4d09-92c9-63b89e571926", ) - response = client.clusters.begin_delete( + client.clusters.begin_delete( resource_group_name="oiautorest6685", cluster_name="oiautorest6685", ).result() - print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2021-06-01/examples/ClustersDelete.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/ClustersDelete.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_get.py 
b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_get.py index cdd78fcd6ea7..98971c38db92 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_get.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ @@ -26,7 +27,7 @@ def main(): client = LogAnalyticsManagementClient( credential=DefaultAzureCredential(), - subscription_id="00000000-0000-0000-0000-00000000000", + subscription_id="53bc36c5-91e1-4d09-92c9-63b89e571926", ) response = client.clusters.get( @@ -36,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2021-06-01/examples/ClustersGet.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/ClustersGet.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_list_by_resource_group.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_list_by_resource_group.py index 79870b7e40c8..38cf9b85abcc 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_list_by_resource_group.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_list_by_resource_group.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ @@ -26,7 +27,7 @@ def main(): client = LogAnalyticsManagementClient( credential=DefaultAzureCredential(), - subscription_id="00000000-0000-0000-0000-00000000000", + subscription_id="53bc36c5-91e1-4d09-92c9-63b89e571926", ) response = client.clusters.list_by_resource_group( @@ -36,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2021-06-01/examples/ClustersListByResourceGroup.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/ClustersListByResourceGroup.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_subscription_list.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_subscription_list.py index fe017aff2e17..0bb8cf14db8a 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_subscription_list.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_subscription_list.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ @@ -26,7 +27,7 @@ def main(): client = LogAnalyticsManagementClient( credential=DefaultAzureCredential(), - subscription_id="00000000-0000-0000-0000-00000000000", + subscription_id="53bc36c5-91e1-4d09-92c9-63b89e571926", ) response = client.clusters.list() @@ -34,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: 
specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2021-06-01/examples/ClustersSubscriptionList.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/ClustersSubscriptionList.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_update.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_update.py index 78750fce8caa..b5274f3a2b8d 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_update.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_update.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ @@ -26,7 +27,7 @@ def main(): client = LogAnalyticsManagementClient( credential=DefaultAzureCredential(), - subscription_id="00000000-0000-0000-0000-00000000000", + subscription_id="53bc36c5-91e1-4d09-92c9-63b89e571926", ) response = client.clusters.begin_update( @@ -36,7 +37,7 @@ def main(): "identity": { "type": "UserAssigned", "userAssignedIdentities": { - "/subscriptions/00000000-0000-0000-0000-00000000000/resourcegroups/oiautorest6685/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myidentity": {} + "/subscriptions/53bc36c5-91e1-4d09-92c9-63b89e571926/resourcegroups/oiautorest6685/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myidentity": {} }, }, "properties": { @@ -44,7 +45,7 @@ def main(): "keyName": "aztest2170cert", "keyRsaSize": 1024, "keyVaultUri": "https://aztest2170.vault.azure.net", - "keyVersion": "654ft6c4e63845cbb50fd6fg51540429", + "keyVersion": "", } }, "sku": {"capacity": 1000, "name": "CapacityReservation"}, @@ -54,6 +55,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2021-06-01/examples/ClustersUpdate.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/ClustersUpdate.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_create_or_update.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_create_or_update.py index 9acda106741e..7b53bdaaca6f 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_create_or_update.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_create_or_update.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_delete.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_delete.py index 2035ab8d427e..46c8b5ec5279 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_delete.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_delete.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ @@ -29,12 +30,11 @@ def 
main(): subscription_id="00000000-0000-0000-0000-00000000000", ) - response = client.data_exports.delete( + client.data_exports.delete( resource_group_name="RgTest1", workspace_name="DeWnTest1234", data_export_name="export1", ) - print(response) # x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/DataExportDelete.json diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_get.py index 881794ff51bd..88ccaa7e568c 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_get.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_list_by_workspace.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_list_by_workspace.py index e7e3df8b5b30..f73fa3feebea 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_list_by_workspace.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_list_by_workspace.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_create.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_create.py index 07ba3f664106..e84362f87c38 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_create.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_create.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_delete.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_delete.py index efd138178700..32d06490844e 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_delete.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_delete.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ @@ -29,12 +30,11 @@ def main(): subscription_id="00000000-0000-0000-0000-00000000000", ) - response = client.data_sources.delete( + client.data_sources.delete( resource_group_name="OIAutoRest5123", workspace_name="AzTest9724", data_source_name="AzTestDS774", ) - print(response) # x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/DataSourcesDelete.json diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_get.py index b5a662419c63..358dacaa37c6 100644 --- 
a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_get.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_list_by_workspace.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_list_by_workspace.py index 313fb25225f1..370b11e7cc3b 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_list_by_workspace.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_list_by_workspace.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_create.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_create.py index e0c666e70dae..ca45e507a7ec 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_create.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_create.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_delete.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_delete.py index 2806c780c2bb..30a5412f442e 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_delete.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_delete.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_get.py index 56886c809f33..b7df436bc6c6 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_get.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_list_by_workspace.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_list_by_workspace.py index cf8e655b4fe8..ec254154bed7 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_list_by_workspace.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_list_by_workspace.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff 
--git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_create.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_create.py index 49c51f8d3d98..a4975dd5a093 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_create.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_create.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_delete.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_delete.py index bf1884881347..b512fca2adfd 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_delete.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_delete.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ @@ -29,12 +30,11 @@ def main(): subscription_id="00000000-0000-0000-0000-00000000000", ) - response = client.linked_storage_accounts.delete( + client.linked_storage_accounts.delete( resource_group_name="mms-eus", workspace_name="testLinkStorageAccountsWS", data_source_type="CustomLogs", ) - print(response) # x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/LinkedStorageAccountsDelete.json diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_get.py index 76a2a67d4d6a..91149cc477da 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_get.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_list_by_workspace.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_list_by_workspace.py index c73a3e19c35c..cb171feefe26 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_list_by_workspace.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_list_by_workspace.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/operation_statuses_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/operation_statuses_get.py index 1b0d0debbcf4..ca75de32507f 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/operation_statuses_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/operation_statuses_get.py @@ -7,6 +7,7 @@ # 
-------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/operations_list_by_tenant.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/operations_list_by_tenant.py index 7e27f9a29f5d..1a675593e69d 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/operations_list_by_tenant.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/operations_list_by_tenant.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_delete.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_delete.py index 92c9c3840c37..aa7d99adf1ba 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_delete.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_delete.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ @@ -29,12 +30,11 @@ def main(): subscription_id="86dc51d3-92ed-4d7e-947a-775ea79b4918", ) - response = client.queries.delete( + client.queries.delete( resource_group_name="my-resource-group", query_pack_name="my-querypack", id="a449f8af-8e64-4b3a-9b16-5a7165ff98c4", ) - print(response) # x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2019-09-01/examples/QueryPackQueriesDelete.json diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_get.py index ad39af1a80ca..7cc2450995f0 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_get.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_list.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_list.py index 8f4f7ba21a21..689351a85ad4 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_list.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_list.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_delete.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_delete.py index da47a450e366..a19fc00230d5 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_delete.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_delete.py @@ -7,6 
+7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ @@ -29,11 +30,10 @@ def main(): subscription_id="86dc51d3-92ed-4d7e-947a-775ea79b4919", ) - response = client.query_packs.delete( + client.query_packs.delete( resource_group_name="my-resource-group", query_pack_name="my-querypack", ) - print(response) # x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2019-09-01/examples/QueryPacksDelete.json diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_get.py index 5bef72352e89..66fe5278a2ef 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_get.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_list.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_list.py index 4cac24f11d78..e51ff5f26d01 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_list.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_list.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_list_by_resource_group.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_list_by_resource_group.py index ab69239fdc81..ba092ff61dd6 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_list_by_resource_group.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_list_by_resource_group.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/saved_searches_get_schema.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/saved_searches_get_schema.py index 8876d2021e55..edbd2af2b017 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/saved_searches_get_schema.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/saved_searches_get_schema.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/saved_searches_list_by_workspace.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/saved_searches_list_by_workspace.py index 7ff24ac0255e..34401c2cdfe2 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/saved_searches_list_by_workspace.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/saved_searches_list_by_workspace.py @@ 
-7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_create_or_update.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_create_or_update.py index 7daf0ec2c584..e3f5998755fc 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_create_or_update.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_create_or_update.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_delete.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_delete.py index 36c2e1821878..29ba98854692 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_delete.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_delete.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ @@ -29,12 +30,11 @@ def main(): subscription_id="00000000-0000-0000-0000-00000000000", ) - response = client.storage_insight_configs.delete( + client.storage_insight_configs.delete( resource_group_name="OIAutoRest5123", workspace_name="aztest5048", storage_insight_name="AzTestSI1110", ) - print(response) # x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/StorageInsightsDelete.json diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_get.py index fc77fedcd028..cb700ea8c6d2 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_get.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_list_by_workspace.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_list_by_workspace.py index 68f8e738efa5..d0408e4b7899 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_list_by_workspace.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_list_by_workspace.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_delete.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_delete.py index a4f09bfbd1e3..e0bfe4656fa6 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_delete.py +++ 
b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_delete.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ @@ -29,12 +30,11 @@ def main(): subscription_id="00000000-0000-0000-0000-00000000000", ) - response = client.tables.begin_delete( + client.tables.begin_delete( resource_group_name="oiautorest6685", workspace_name="oiautorest6685", table_name="table1_CL", ).result() - print(response) # x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/TablesDelete.json diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_get.py index 01a1835399d6..035130a6d7ec 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_get.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_list.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_list.py index a10f9528cb8a..d9debdcf1d2a 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_list.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_list.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_migrate.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_migrate.py index 8236ab88dbc9..47958c7c1bcb 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_migrate.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_migrate.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ @@ -29,12 +30,11 @@ def main(): subscription_id="00000000-0000-0000-0000-00000000000", ) - response = client.tables.migrate( + client.tables.migrate( resource_group_name="oiautorest6685", workspace_name="oiautorest6685", table_name="table1_CL", ) - print(response) # x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/TablesMigrate.json diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_search_cancel.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_search_cancel.py index 343807291013..e4d548d6240d 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_search_cancel.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_search_cancel.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ @@ -29,12 +30,11 @@ def main(): 
subscription_id="00000000-0000-0000-0000-00000000000", ) - response = client.tables.cancel_search( + client.tables.cancel_search( resource_group_name="oiautorest6685", workspace_name="oiautorest6685", table_name="table1_SRCH", ) - print(response) # x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/TablesSearchCancel.json diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_upsert.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_upsert.py index d8c153546dbc..ec01780d1520 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_upsert.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_upsert.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ @@ -29,7 +30,7 @@ def main(): subscription_id="00000000-0000-0000-0000-00000000000", ) - response = client.tables.begin_create_or_update( + response = client.tables.begin_update( resource_group_name="oiautorest6685", workspace_name="oiautorest6685", table_name="AzureNetworkFlow", diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_available_service_tiers.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_available_service_tiers.py index 85a4a22b40c0..2dd225845d6c 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_available_service_tiers.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_available_service_tiers.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_create.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_create.py index e07d706ea52b..22fb133340a8 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_create.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_create.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_delete.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_delete.py index da7ff1698cd1..11b3d0ee7e07 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_delete.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_delete.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ @@ -29,11 +30,10 @@ def main(): subscription_id="00000000-0000-0000-0000-00000000000", ) - response = client.workspaces.begin_delete( + client.workspaces.begin_delete( resource_group_name="oiautorest6685", workspace_name="oiautorest6685", ).result() - print(response) # x-ms-original-file: 
specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/WorkspacesDelete.json diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_delete_saved_searches.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_delete_saved_searches.py index 64cecff938ba..8b9fd9c84200 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_delete_saved_searches.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_delete_saved_searches.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ @@ -29,12 +30,11 @@ def main(): subscription_id="00000000-0000-0000-0000-00000000000", ) - response = client.saved_searches.delete( + client.saved_searches.delete( resource_group_name="TestRG", workspace_name="TestWS", saved_search_id="00000000-0000-0000-0000-00000000000", ) - print(response) # x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/WorkspacesDeleteSavedSearches.json diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_disable_intelligence_pack.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_disable_intelligence_pack.py index c08654b20198..77948284f902 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_disable_intelligence_pack.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_disable_intelligence_pack.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ @@ -29,12 +30,11 @@ def main(): subscription_id="00000000-0000-0000-0000-00000000000", ) - response = client.intelligence_packs.disable( + client.intelligence_packs.disable( resource_group_name="rg1", workspace_name="TestLinkWS", intelligence_pack_name="ChangeTracking", ) - print(response) # x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/WorkspacesDisableIntelligencePack.json diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_enable_intelligence_pack.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_enable_intelligence_pack.py index 0043157be774..f19ec4f6cb0f 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_enable_intelligence_pack.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_enable_intelligence_pack.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ @@ -29,12 +30,11 @@ def main(): subscription_id="00000000-0000-0000-0000-00000000000", ) - response = client.intelligence_packs.enable( + client.intelligence_packs.enable( resource_group_name="rg1", workspace_name="TestLinkWS", intelligence_pack_name="ChangeTracking", ) - print(response) # x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/WorkspacesEnableIntelligencePack.json diff --git 
a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_gateways_delete.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_gateways_delete.py index cab4876e4784..516b776c582c 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_gateways_delete.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_gateways_delete.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ @@ -29,12 +30,11 @@ def main(): subscription_id="00000000-0000-0000-0000-00000000000", ) - response = client.gateways.delete( + client.gateways.delete( resource_group_name="OIAutoRest5123", workspace_name="aztest5048", gateway_id="00000000-0000-0000-0000-00000000000", ) - print(response) # x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/WorkspacesGatewaysDelete.json diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_get.py index 82f5748a0e5d..658a7fbd4fd0 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_get.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_get_shared_keys.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_get_shared_keys.py index af6142f22997..08e6a242c781 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_get_shared_keys.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_get_shared_keys.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_by_resource_group.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_by_resource_group.py index 0bf1fef7f6ce..b71d980c3e4e 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_by_resource_group.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_by_resource_group.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_intelligence_packs.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_intelligence_packs.py index 91184d88159d..3ed8bf4c2e7b 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_intelligence_packs.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_intelligence_packs.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import 
DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_management_groups.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_management_groups.py index b364e06fdf54..2b4f496106c1 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_management_groups.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_management_groups.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_usages.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_usages.py index ce973a31de9f..80d12940f217 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_usages.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_usages.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge.py index bde589aac0a5..5f2d589a4aca 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge_operation.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge_operation.py index 8201f3aceff4..845dc6de3fa0 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge_operation.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge_operation.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge_resource_id.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge_resource_id.py index c218b3ceff86..7a47452ca322 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge_resource_id.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge_resource_id.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_regenerate_shared_keys.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_regenerate_shared_keys.py index d354d47d812e..75b952d527df 100644 --- 
a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_regenerate_shared_keys.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_regenerate_shared_keys.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_saved_searches_create_or_update.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_saved_searches_create_or_update.py index 8d5a718e5212..d254f65b0972 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_saved_searches_create_or_update.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_saved_searches_create_or_update.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_saved_searches_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_saved_searches_get.py index b9345ade07b9..fa6cb56271d0 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_saved_searches_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_saved_searches_get.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_subscription_list.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_subscription_list.py index a13f6206380b..251fd49f9799 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_subscription_list.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_subscription_list.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ @@ -29,7 +30,7 @@ def main(): subscription_id="00000000-0000-0000-0000-00000000000", ) - response = client.workspaces.list() + response = client.deleted_workspaces.list() for item in response: print(item) diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_update.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_update.py index 44e617867def..804286e0d7b0 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_update.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_update.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.loganalytics import LogAnalyticsManagementClient """ diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/conftest.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/conftest.py new file mode 100644 index 000000000000..1fbe0ba3e8b2 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/conftest.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import os +import pytest +from dotenv import load_dotenv +from devtools_testutils import ( + test_proxy, + add_general_regex_sanitizer, + add_body_key_sanitizer, + add_header_regex_sanitizer, +) + +load_dotenv() + + +# avoid recording sensitive identity information in recordings +@pytest.fixture(scope="session", autouse=True) +def add_sanitizers(test_proxy): + loganalyticsmanagement_subscription_id = os.environ.get( + "AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000" + ) + loganalyticsmanagement_tenant_id = os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000") + loganalyticsmanagement_client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000") + loganalyticsmanagement_client_secret = os.environ.get("AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer( + regex=loganalyticsmanagement_subscription_id, value="00000000-0000-0000-0000-000000000000" + ) + add_general_regex_sanitizer(regex=loganalyticsmanagement_tenant_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=loganalyticsmanagement_client_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer( + regex=loganalyticsmanagement_client_secret, value="00000000-0000-0000-0000-000000000000" + ) + + add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]") + add_header_regex_sanitizer(key="Cookie", value="cookie;") + add_body_key_sanitizer(json_path="$..access_token", value="access_token") diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_available_service_tiers_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_available_service_tiers_operations.py new file mode 100644 index 000000000000..7227fc83cd7e --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_available_service_tiers_operations.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementAvailableServiceTiersOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_workspace(self, resource_group): + response = self.client.available_service_tiers.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_available_service_tiers_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_available_service_tiers_operations_async.py new file mode 100644 index 000000000000..45b08534c726 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_available_service_tiers_operations_async.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementAvailableServiceTiersOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_workspace(self, resource_group): + response = await self.client.available_service_tiers.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_clusters_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_clusters_operations.py new file mode 100644 index 000000000000..31742b02cb41 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_clusters_operations.py @@ -0,0 +1,126 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementClustersOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_resource_group(self, resource_group): + response = self.client.clusters.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2022-10-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.clusters.list( + api_version="2022-10-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_or_update(self, resource_group): + response = self.client.clusters.begin_create_or_update( + resource_group_name=resource_group.name, + cluster_name="str", + parameters={ + "location": "str", + "associatedWorkspaces": [ + {"associateDate": "str", "resourceId": "str", "workspaceId": "str", "workspaceName": "str"} + ], + "billingType": "str", + "capacityReservationProperties": {"lastSkuUpdate": "str", "minCapacity": 0}, + "clusterId": "str", + "createdDate": "str", + "id": "str", + "identity": { + "type": "str", + "principalId": "str", + "tenantId": "str", + "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "isAvailabilityZonesEnabled": bool, + "isDoubleEncryptionEnabled": bool, + "keyVaultProperties": {"keyName": "str", "keyRsaSize": 0, "keyVaultUri": "str", "keyVersion": "str"}, + "lastModifiedDate": "str", + "name": "str", + "provisioningState": "str", + "sku": {"capacity": 0, "name": "str"}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2022-10-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete(self, resource_group): + response = self.client.clusters.begin_delete( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2022-10-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.clusters.get( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2022-10-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_update(self, resource_group): + response = self.client.clusters.begin_update( + resource_group_name=resource_group.name, + cluster_name="str", + parameters={ + "billingType": "str", + "identity": { + "type": "str", + "principalId": "str", + "tenantId": "str", + "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "keyVaultProperties": {"keyName": "str", "keyRsaSize": 0, "keyVaultUri": "str", "keyVersion": "str"}, + "sku": {"capacity": 0, "name": "str"}, + "tags": {"str": "str"}, + }, + api_version="2022-10-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_clusters_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_clusters_operations_async.py new file mode 100644 index 000000000000..1cb078e0d3ce --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_clusters_operations_async.py @@ -0,0 +1,143 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementClustersOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_resource_group(self, resource_group): + response = self.client.clusters.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2022-10-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.clusters.list( + api_version="2022-10-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_or_update(self, resource_group): + response = await ( + await self.client.clusters.begin_create_or_update( + resource_group_name=resource_group.name, + cluster_name="str", + parameters={ + "location": "str", + "associatedWorkspaces": [ + {"associateDate": "str", "resourceId": "str", "workspaceId": "str", "workspaceName": "str"} + ], + "billingType": "str", + "capacityReservationProperties": {"lastSkuUpdate": "str", "minCapacity": 0}, + "clusterId": "str", + "createdDate": "str", + "id": "str", + "identity": { + "type": "str", + "principalId": "str", + "tenantId": "str", + "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "isAvailabilityZonesEnabled": bool, + "isDoubleEncryptionEnabled": bool, + "keyVaultProperties": { + "keyName": "str", + "keyRsaSize": 0, + "keyVaultUri": "str", + "keyVersion": "str", + }, + "lastModifiedDate": "str", + "name": "str", + "provisioningState": "str", + "sku": {"capacity": 0, "name": "str"}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2022-10-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete(self, resource_group): + response = await ( + await self.client.clusters.begin_delete( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2022-10-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.clusters.get( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2022-10-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_update(self, resource_group): + response = await ( + await self.client.clusters.begin_update( + resource_group_name=resource_group.name, + cluster_name="str", + parameters={ + "billingType": "str", + "identity": { + "type": "str", + "principalId": "str", + "tenantId": "str", + "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "keyVaultProperties": { + "keyName": "str", + "keyRsaSize": 0, + "keyVaultUri": "str", + "keyVersion": "str", + }, + "sku": {"capacity": 0, "name": "str"}, + "tags": {"str": "str"}, + }, + api_version="2022-10-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_exports_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_exports_operations.py new file mode 100644 index 000000000000..b38e545a8332 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_exports_operations.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementDataExportsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_workspace(self, resource_group): + response = self.client.data_exports.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.data_exports.create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + data_export_name="str", + parameters={ + "createdDate": "str", + "dataExportId": "str", + "enable": bool, + "eventHubName": "str", + "id": "str", + "lastModifiedDate": "str", + "name": "str", + "resourceId": "str", + "tableNames": ["str"], + "type": "str", + }, + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.data_exports.get( + resource_group_name=resource_group.name, + workspace_name="str", + data_export_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.data_exports.delete( + resource_group_name=resource_group.name, + workspace_name="str", + data_export_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_exports_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_exports_operations_async.py new file mode 100644 index 000000000000..c8c275998c56 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_exports_operations_async.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementDataExportsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_workspace(self, resource_group): + response = self.client.data_exports.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.data_exports.create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + data_export_name="str", + parameters={ + "createdDate": "str", + "dataExportId": "str", + "enable": bool, + "eventHubName": "str", + "id": "str", + "lastModifiedDate": "str", + "name": "str", + "resourceId": "str", + "tableNames": ["str"], + "type": "str", + }, + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.data_exports.get( + resource_group_name=resource_group.name, + workspace_name="str", + data_export_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.data_exports.delete( + resource_group_name=resource_group.name, + workspace_name="str", + data_export_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_sources_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_sources_operations.py new file mode 100644 index 000000000000..641ca14768be --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_sources_operations.py @@ -0,0 +1,80 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementDataSourcesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.data_sources.create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + data_source_name="str", + parameters={ + "kind": "str", + "properties": {}, + "etag": "str", + "id": "str", + "name": "str", + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.data_sources.delete( + resource_group_name=resource_group.name, + workspace_name="str", + data_source_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.data_sources.get( + resource_group_name=resource_group.name, + workspace_name="str", + data_source_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_workspace(self, resource_group): + response = self.client.data_sources.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + filter="str", + api_version="2020-08-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_sources_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_sources_operations_async.py new file mode 100644 index 000000000000..eaadae597f26 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_sources_operations_async.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementDataSourcesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.data_sources.create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + data_source_name="str", + parameters={ + "kind": "str", + "properties": {}, + "etag": "str", + "id": "str", + "name": "str", + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.data_sources.delete( + resource_group_name=resource_group.name, + workspace_name="str", + data_source_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.data_sources.get( + resource_group_name=resource_group.name, + workspace_name="str", + data_source_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_workspace(self, resource_group): + response = self.client.data_sources.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + filter="str", + api_version="2020-08-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_deleted_workspaces_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_deleted_workspaces_operations.py new file mode 100644 index 000000000000..e2655da89001 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_deleted_workspaces_operations.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementDeletedWorkspacesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.deleted_workspaces.list( + api_version="2022-10-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_resource_group(self, resource_group): + response = self.client.deleted_workspaces.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2022-10-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_deleted_workspaces_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_deleted_workspaces_operations_async.py new file mode 100644 index 000000000000..40790285853a --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_deleted_workspaces_operations_async.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementDeletedWorkspacesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.deleted_workspaces.list( + api_version="2022-10-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_resource_group(self, resource_group): + response = self.client.deleted_workspaces.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2022-10-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
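The deleted-workspaces tests above stop at the placeholder comment ("please add some check logic here by yourself"). As a non-authoritative sketch of what that check logic could look like for the list-style tests, assuming the recorded responses expose the usual ARM resource fields (id, name, type) on each workspace item:

def assert_workspace_page(result):
    # The tests already materialize the pager into a list; for a recorded
    # test, basic sanity checks on each returned resource are usually enough.
    assert isinstance(result, list)
    for workspace in result:
        assert workspace.id    # full ARM resource ID
        assert workspace.name  # workspace name
        assert workspace.type  # e.g. "Microsoft.OperationalInsights/workspaces"

# e.g. inside test_list / test_list_by_resource_group:
#     result = [r for r in response]
#     assert_workspace_page(result)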
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_gateways_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_gateways_operations.py new file mode 100644 index 000000000000..206d2eb6ecab --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_gateways_operations.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementGatewaysOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.gateways.delete( + resource_group_name=resource_group.name, + workspace_name="str", + gateway_id="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_gateways_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_gateways_operations_async.py new file mode 100644 index 000000000000..83924d573d7c --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_gateways_operations_async.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementGatewaysOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.gateways.delete( + resource_group_name=resource_group.name, + workspace_name="str", + gateway_id="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_intelligence_packs_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_intelligence_packs_operations.py new file mode 100644 index 000000000000..265bd2880bbe --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_intelligence_packs_operations.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementIntelligencePacksOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_disable(self, resource_group): + response = self.client.intelligence_packs.disable( + resource_group_name=resource_group.name, + workspace_name="str", + intelligence_pack_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_enable(self, resource_group): + response = self.client.intelligence_packs.enable( + resource_group_name=resource_group.name, + workspace_name="str", + intelligence_pack_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.intelligence_packs.list( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... 
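For the intelligence-pack operations, one possible way to fill in the missing checks (illustrative only; the name/enabled attribute names are assumed from the IntelligencePack model and should be verified against the installed SDK version):

def assert_intelligence_packs(packs):
    # list() returns IntelligencePack objects; enable/disable return no body,
    # so for those tests completing without raising is effectively the check.
    packs = list(packs)
    assert packs, "expected at least one intelligence pack"
    for pack in packs:
        assert pack.name                          # pack identifier, e.g. "Security"
        assert isinstance(pack.enabled, bool)     # attribute name assumed from the model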
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_intelligence_packs_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_intelligence_packs_operations_async.py new file mode 100644 index 000000000000..d39adeee23b3 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_intelligence_packs_operations_async.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementIntelligencePacksOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_disable(self, resource_group): + response = await self.client.intelligence_packs.disable( + resource_group_name=resource_group.name, + workspace_name="str", + intelligence_pack_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_enable(self, resource_group): + response = await self.client.intelligence_packs.enable( + resource_group_name=resource_group.name, + workspace_name="str", + intelligence_pack_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = await self.client.intelligence_packs.list( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_services_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_services_operations.py new file mode 100644 index 000000000000..485f4c2f70f2 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_services_operations.py @@ -0,0 +1,79 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementLinkedServicesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_or_update(self, resource_group): + response = self.client.linked_services.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + linked_service_name="str", + parameters={ + "id": "str", + "name": "str", + "provisioningState": "str", + "resourceId": "str", + "tags": {"str": "str"}, + "type": "str", + "writeAccessResourceId": "str", + }, + api_version="2020-08-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete(self, resource_group): + response = self.client.linked_services.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + linked_service_name="str", + api_version="2020-08-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.linked_services.get( + resource_group_name=resource_group.name, + workspace_name="str", + linked_service_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_workspace(self, resource_group): + response = self.client.linked_services.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_services_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_services_operations_async.py new file mode 100644 index 000000000000..bd80aac4523c --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_services_operations_async.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementLinkedServicesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_or_update(self, resource_group): + response = await ( + await self.client.linked_services.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + linked_service_name="str", + parameters={ + "id": "str", + "name": "str", + "provisioningState": "str", + "resourceId": "str", + "tags": {"str": "str"}, + "type": "str", + "writeAccessResourceId": "str", + }, + api_version="2020-08-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete(self, resource_group): + response = await ( + await self.client.linked_services.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + linked_service_name="str", + api_version="2020-08-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.linked_services.get( + resource_group_name=resource_group.name, + workspace_name="str", + linked_service_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_workspace(self, resource_group): + response = self.client.linked_services.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_storage_accounts_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_storage_accounts_operations.py new file mode 100644 index 000000000000..07442c313288 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_storage_accounts_operations.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementLinkedStorageAccountsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.linked_storage_accounts.create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + data_source_type="str", + parameters={ + "dataSourceType": "str", + "id": "str", + "name": "str", + "storageAccountIds": ["str"], + "type": "str", + }, + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.linked_storage_accounts.delete( + resource_group_name=resource_group.name, + workspace_name="str", + data_source_type="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.linked_storage_accounts.get( + resource_group_name=resource_group.name, + workspace_name="str", + data_source_type="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_workspace(self, resource_group): + response = self.client.linked_storage_accounts.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_storage_accounts_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_storage_accounts_operations_async.py new file mode 100644 index 000000000000..2f312b4bed51 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_storage_accounts_operations_async.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementLinkedStorageAccountsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.linked_storage_accounts.create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + data_source_type="str", + parameters={ + "dataSourceType": "str", + "id": "str", + "name": "str", + "storageAccountIds": ["str"], + "type": "str", + }, + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.linked_storage_accounts.delete( + resource_group_name=resource_group.name, + workspace_name="str", + data_source_type="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.linked_storage_accounts.get( + resource_group_name=resource_group.name, + workspace_name="str", + data_source_type="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_workspace(self, resource_group): + response = self.client.linked_storage_accounts.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_management_groups_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_management_groups_operations.py new file mode 100644 index 000000000000..ab224612c265 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_management_groups_operations.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementManagementGroupsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.management_groups.list( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_management_groups_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_management_groups_operations_async.py new file mode 100644 index 000000000000..dfbcf1d4b08a --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_management_groups_operations_async.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementManagementGroupsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.management_groups.list( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operation_statuses_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operation_statuses_operations.py new file mode 100644 index 000000000000..dcd1005b384e --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operation_statuses_operations.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementOperationStatusesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.operation_statuses.get( + location="str", + async_operation_id="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operation_statuses_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operation_statuses_operations_async.py new file mode 100644 index 000000000000..b8c5b5191f30 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operation_statuses_operations_async.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementOperationStatusesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.operation_statuses.get( + location="str", + async_operation_id="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operations.py new file mode 100644 index 000000000000..a71bbdeaec2f --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operations.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.operations.list( + api_version="2022-10-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operations_async.py new file mode 100644 index 000000000000..566b357e7439 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operations_async.py @@ -0,0 +1,30 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.operations.list( + api_version="2022-10-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_queries_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_queries_operations.py new file mode 100644 index 000000000000..369cab55436b --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_queries_operations.py @@ -0,0 +1,143 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementQueriesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.queries.list( + resource_group_name=resource_group.name, + query_pack_name="str", + api_version="2019-09-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_search(self, resource_group): + response = self.client.queries.search( + resource_group_name=resource_group.name, + query_pack_name="str", + query_search_properties={ + "related": {"categories": ["str"], "resourceTypes": ["str"], "solutions": ["str"]}, + "tags": {"str": ["str"]}, + }, + api_version="2019-09-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.queries.get( + resource_group_name=resource_group.name, + query_pack_name="str", + id="str", + api_version="2019-09-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_put(self, resource_group): + response = self.client.queries.put( + resource_group_name=resource_group.name, + query_pack_name="str", + id="str", + query_payload={ + "author": "str", + "body": "str", + "description": "str", + "displayName": "str", + "id": "str", + "name": "str", + "properties": {}, + "related": {"categories": ["str"], "resourceTypes": ["str"], "solutions": ["str"]}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": ["str"]}, + "timeCreated": "2020-02-20 00:00:00", + "timeModified": "2020-02-20 00:00:00", + "type": "str", + }, + api_version="2019-09-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_update(self, resource_group): + response = self.client.queries.update( + resource_group_name=resource_group.name, + query_pack_name="str", + id="str", + query_payload={ + "author": "str", + "body": "str", + "description": "str", + "displayName": "str", + "id": "str", + "name": "str", + "properties": {}, + "related": {"categories": ["str"], "resourceTypes": ["str"], "solutions": ["str"]}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": ["str"]}, + "timeCreated": "2020-02-20 00:00:00", + "timeModified": "2020-02-20 00:00:00", + "type": "str", + }, + api_version="2019-09-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.queries.delete( + resource_group_name=resource_group.name, + query_pack_name="str", + id="str", + api_version="2019-09-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_queries_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_queries_operations_async.py new file mode 100644 index 000000000000..d466472e6c15 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_queries_operations_async.py @@ -0,0 +1,144 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementQueriesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.queries.list( + resource_group_name=resource_group.name, + query_pack_name="str", + api_version="2019-09-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_search(self, resource_group): + response = self.client.queries.search( + resource_group_name=resource_group.name, + query_pack_name="str", + query_search_properties={ + "related": {"categories": ["str"], "resourceTypes": ["str"], "solutions": ["str"]}, + "tags": {"str": ["str"]}, + }, + api_version="2019-09-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.queries.get( + resource_group_name=resource_group.name, + query_pack_name="str", + id="str", + api_version="2019-09-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_put(self, resource_group): + response = await self.client.queries.put( + resource_group_name=resource_group.name, + query_pack_name="str", + id="str", + query_payload={ + "author": "str", + "body": "str", + "description": "str", + "displayName": "str", + "id": "str", + "name": "str", + "properties": {}, + "related": {"categories": ["str"], "resourceTypes": ["str"], "solutions": ["str"]}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": ["str"]}, + "timeCreated": "2020-02-20 00:00:00", + "timeModified": "2020-02-20 00:00:00", + "type": "str", + }, + api_version="2019-09-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_update(self, resource_group): + response = await self.client.queries.update( + resource_group_name=resource_group.name, + query_pack_name="str", + id="str", + query_payload={ + "author": "str", + "body": "str", + "description": "str", + "displayName": "str", + "id": "str", + "name": "str", + "properties": {}, + "related": {"categories": ["str"], "resourceTypes": ["str"], "solutions": ["str"]}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": ["str"]}, + "timeCreated": "2020-02-20 00:00:00", + "timeModified": "2020-02-20 00:00:00", + "type": "str", + }, + api_version="2019-09-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.queries.delete( + resource_group_name=resource_group.name, + query_pack_name="str", + id="str", + api_version="2019-09-01", + ) + + # please add some check logic here by yourself + # ... 
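The query tests above build the request payload as a plain dict. An equivalent, slightly more readable sketch uses the typed model instead (model and keyword names assumed from azure.mgmt.loganalytics.models; verify against the installed package before relying on them):

from azure.identity import DefaultAzureCredential
from azure.mgmt.loganalytics import LogAnalyticsManagementClient
from azure.mgmt.loganalytics.models import LogAnalyticsQueryPackQuery

client = LogAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")

# Build the payload with the model class rather than a raw dict.
payload = LogAnalyticsQueryPackQuery(
    display_name="Hourly average CPU",
    body="Perf | where CounterName == '% Processor Time' "
         "| summarize avg(CounterValue) by bin(TimeGenerated, 1h)",
    description="Average processor time per hour",
)

client.queries.put(
    resource_group_name="<resource-group>",   # placeholder values
    query_pack_name="<query-pack>",
    id="<query-id-guid>",
    query_payload=payload,
)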
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_query_packs_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_query_packs_operations.py new file mode 100644 index 000000000000..e201d3e79fdc --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_query_packs_operations.py @@ -0,0 +1,122 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementQueryPacksOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.query_packs.list( + api_version="2019-09-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_resource_group(self, resource_group): + response = self.client.query_packs.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2019-09-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update_without_name(self, resource_group): + response = self.client.query_packs.create_or_update_without_name( + resource_group_name=resource_group.name, + log_analytics_query_pack_payload={ + "location": "str", + "id": "str", + "name": "str", + "provisioningState": "str", + "queryPackId": "str", + "tags": {"str": "str"}, + "timeCreated": "2020-02-20 00:00:00", + "timeModified": "2020-02-20 00:00:00", + "type": "str", + }, + api_version="2019-09-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.query_packs.delete( + resource_group_name=resource_group.name, + query_pack_name="str", + api_version="2019-09-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.query_packs.get( + resource_group_name=resource_group.name, + query_pack_name="str", + api_version="2019-09-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.query_packs.create_or_update( + resource_group_name=resource_group.name, + query_pack_name="str", + log_analytics_query_pack_payload={ + "location": "str", + "id": "str", + "name": "str", + "provisioningState": "str", + "queryPackId": "str", + "tags": {"str": "str"}, + "timeCreated": "2020-02-20 00:00:00", + "timeModified": "2020-02-20 00:00:00", + "type": "str", + }, + api_version="2019-09-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_update_tags(self, resource_group): + response = self.client.query_packs.update_tags( + resource_group_name=resource_group.name, + query_pack_name="str", + query_pack_tags={"tags": {"str": "str"}}, + api_version="2019-09-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_query_packs_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_query_packs_operations_async.py new file mode 100644 index 000000000000..e40f2126a2f0 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_query_packs_operations_async.py @@ -0,0 +1,123 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementQueryPacksOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.query_packs.list( + api_version="2019-09-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_resource_group(self, resource_group): + response = self.client.query_packs.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2019-09-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update_without_name(self, resource_group): + response = await self.client.query_packs.create_or_update_without_name( + resource_group_name=resource_group.name, + log_analytics_query_pack_payload={ + "location": "str", + "id": "str", + "name": "str", + "provisioningState": "str", + "queryPackId": "str", + "tags": {"str": "str"}, + "timeCreated": "2020-02-20 00:00:00", + "timeModified": "2020-02-20 00:00:00", + "type": "str", + }, + api_version="2019-09-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.query_packs.delete( + resource_group_name=resource_group.name, + query_pack_name="str", + api_version="2019-09-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.query_packs.get( + resource_group_name=resource_group.name, + query_pack_name="str", + api_version="2019-09-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.query_packs.create_or_update( + resource_group_name=resource_group.name, + query_pack_name="str", + log_analytics_query_pack_payload={ + "location": "str", + "id": "str", + "name": "str", + "provisioningState": "str", + "queryPackId": "str", + "tags": {"str": "str"}, + "timeCreated": "2020-02-20 00:00:00", + "timeModified": "2020-02-20 00:00:00", + "type": "str", + }, + api_version="2019-09-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_update_tags(self, resource_group): + response = await self.client.query_packs.update_tags( + resource_group_name=resource_group.name, + query_pack_name="str", + query_pack_tags={"tags": {"str": "str"}}, + api_version="2019-09-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_saved_searches_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_saved_searches_operations.py new file mode 100644 index 000000000000..732c7b0741e9 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_saved_searches_operations.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementSavedSearchesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.saved_searches.delete( + resource_group_name=resource_group.name, + workspace_name="str", + saved_search_id="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.saved_searches.create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + saved_search_id="str", + parameters={ + "category": "str", + "displayName": "str", + "query": "str", + "etag": "str", + "functionAlias": "str", + "functionParameters": "str", + "id": "str", + "name": "str", + "tags": [{"name": "str", "value": "str"}], + "type": "str", + "version": 0, + }, + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.saved_searches.get( + resource_group_name=resource_group.name, + workspace_name="str", + saved_search_id="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_workspace(self, resource_group): + response = self.client.saved_searches.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_saved_searches_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_saved_searches_operations_async.py new file mode 100644 index 000000000000..7afeabdffb9f --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_saved_searches_operations_async.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementSavedSearchesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.saved_searches.delete( + resource_group_name=resource_group.name, + workspace_name="str", + saved_search_id="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.saved_searches.create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + saved_search_id="str", + parameters={ + "category": "str", + "displayName": "str", + "query": "str", + "etag": "str", + "functionAlias": "str", + "functionParameters": "str", + "id": "str", + "name": "str", + "tags": [{"name": "str", "value": "str"}], + "type": "str", + "version": 0, + }, + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.saved_searches.get( + resource_group_name=resource_group.name, + workspace_name="str", + saved_search_id="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_workspace(self, resource_group): + response = await self.client.saved_searches.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_schema_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_schema_operations.py new file mode 100644 index 000000000000..4af4e214066e --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_schema_operations.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementSchemaOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.schema.get( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_schema_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_schema_operations_async.py new file mode 100644 index 000000000000..a968c33a96d3 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_schema_operations_async.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementSchemaOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.schema.get( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_shared_keys_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_shared_keys_operations.py new file mode 100644 index 000000000000..c3f672c8316e --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_shared_keys_operations.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementSharedKeysOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_shared_keys(self, resource_group): + response = self.client.shared_keys.get_shared_keys( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_regenerate(self, resource_group): + response = self.client.shared_keys.regenerate( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_shared_keys_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_shared_keys_operations_async.py new file mode 100644 index 000000000000..f38805abe402 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_shared_keys_operations_async.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementSharedKeysOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_shared_keys(self, resource_group): + response = await self.client.shared_keys.get_shared_keys( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_regenerate(self, resource_group): + response = await self.client.shared_keys.regenerate( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... 
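The recurring "# please add some check logic here by yourself" placeholder marks where assertions belong once the pytest.mark.skip marker is removed and a recording exists. For the shared-keys tests above, a minimal sketch of such checks could be the following; the primary_shared_key / secondary_shared_key attribute names are assumed from the service's primarySharedKey / secondarySharedKey response fields and are not asserted anywhere in this patch:

    # inside test_get_shared_keys, in place of the placeholder comment
    assert response is not None
    # both keys should be populated for an existing workspace (attribute names assumed)
    assert response.primary_shared_key
    assert response.secondary_shared_key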
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_storage_insight_configs_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_storage_insight_configs_operations.py new file mode 100644 index 000000000000..f36d19061e8c --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_storage_insight_configs_operations.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementStorageInsightConfigsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.storage_insight_configs.create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + storage_insight_name="str", + parameters={ + "containers": ["str"], + "eTag": "str", + "id": "str", + "name": "str", + "status": {"state": "str", "description": "str"}, + "storageAccount": {"id": "str", "key": "str"}, + "tables": ["str"], + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.storage_insight_configs.get( + resource_group_name=resource_group.name, + workspace_name="str", + storage_insight_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.storage_insight_configs.delete( + resource_group_name=resource_group.name, + workspace_name="str", + storage_insight_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_workspace(self, resource_group): + response = self.client.storage_insight_configs.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
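For list operations such as list_by_workspace, the generated tests already drain the pager into result; a hedged sketch of follow-up checks, assuming the items deserialize to the StorageInsight model with name, containers and tables attributes (names inferred from the request payload above, not confirmed by this patch):

    from azure.mgmt.loganalytics.models import StorageInsight

    # result is the drained pager from test_list_by_workspace: [r for r in response]
    assert isinstance(result, list)
    for insight in result:
        assert isinstance(insight, StorageInsight)   # model name assumed
        assert insight.name
        # a storage insight watches blob containers and/or tables
        assert insight.containers or insight.tables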
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_storage_insight_configs_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_storage_insight_configs_operations_async.py new file mode 100644 index 000000000000..7a6dffb0491e --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_storage_insight_configs_operations_async.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementStorageInsightConfigsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.storage_insight_configs.create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + storage_insight_name="str", + parameters={ + "containers": ["str"], + "eTag": "str", + "id": "str", + "name": "str", + "status": {"state": "str", "description": "str"}, + "storageAccount": {"id": "str", "key": "str"}, + "tables": ["str"], + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.storage_insight_configs.get( + resource_group_name=resource_group.name, + workspace_name="str", + storage_insight_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.storage_insight_configs.delete( + resource_group_name=resource_group.name, + workspace_name="str", + storage_insight_name="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_workspace(self, resource_group): + response = self.client.storage_insight_configs.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
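Note the asymmetry the async variants rely on: list_by_workspace is called without await because it returns an async paged iterator that is only consumed with "async for", whereas single-resource calls such as get and delete are awaited directly. A self-contained sketch of that consumption pattern outside the test harness (the credential choice and the placeholder identifiers are illustrative, not part of the generated tests):

    import asyncio

    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient

    async def dump_storage_insights(subscription_id, resource_group_name, workspace_name):
        async with DefaultAzureCredential() as credential:
            async with LogAnalyticsManagementClient(credential, subscription_id) as client:
                # no await here: this returns an async pager; pages are fetched lazily
                pager = client.storage_insight_configs.list_by_workspace(
                    resource_group_name=resource_group_name,
                    workspace_name=workspace_name,
                )
                async for insight in pager:  # each iteration may trigger a page fetch
                    print(insight.name)

    # asyncio.run(dump_storage_insights("<subscription-id>", "<rg>", "<workspace>"))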
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_tables_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_tables_operations.py new file mode 100644 index 000000000000..157b5ce8fb0a --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_tables_operations.py @@ -0,0 +1,249 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementTablesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_workspace(self, resource_group): + response = self.client.tables.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2022-10-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_or_update(self, resource_group): + response = self.client.tables.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + table_name="str", + parameters={ + "archiveRetentionInDays": 0, + "id": "str", + "lastPlanModifiedDate": "str", + "name": "str", + "plan": "str", + "provisioningState": "str", + "restoredLogs": { + "azureAsyncOperationId": "str", + "endRestoreTime": "2020-02-20 00:00:00", + "sourceTable": "str", + "startRestoreTime": "2020-02-20 00:00:00", + }, + "resultStatistics": {"ingestedRecords": 0, "progress": 0.0, "scannedGb": 0.0}, + "retentionInDays": 0, + "retentionInDaysAsDefault": bool, + "schema": { + "categories": ["str"], + "columns": [ + { + "dataTypeHint": "str", + "description": "str", + "displayName": "str", + "isDefaultDisplay": bool, + "isHidden": bool, + "name": "str", + "type": "str", + } + ], + "description": "str", + "displayName": "str", + "labels": ["str"], + "name": "str", + "solutions": ["str"], + "source": "str", + "standardColumns": [ + { + "dataTypeHint": "str", + "description": "str", + "displayName": "str", + "isDefaultDisplay": bool, + "isHidden": bool, + "name": "str", + "type": "str", + } + ], + "tableSubType": "str", + "tableType": "str", + }, + "searchResults": { + "azureAsyncOperationId": "str", + "description": "str", + "endSearchTime": "2020-02-20 00:00:00", + "limit": 0, + "query": "str", + "sourceTable": "str", + "startSearchTime": "2020-02-20 00:00:00", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", 
+ }, + "totalRetentionInDays": 0, + "totalRetentionInDaysAsDefault": bool, + "type": "str", + }, + api_version="2022-10-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_update(self, resource_group): + response = self.client.tables.begin_update( + resource_group_name=resource_group.name, + workspace_name="str", + table_name="str", + parameters={ + "archiveRetentionInDays": 0, + "id": "str", + "lastPlanModifiedDate": "str", + "name": "str", + "plan": "str", + "provisioningState": "str", + "restoredLogs": { + "azureAsyncOperationId": "str", + "endRestoreTime": "2020-02-20 00:00:00", + "sourceTable": "str", + "startRestoreTime": "2020-02-20 00:00:00", + }, + "resultStatistics": {"ingestedRecords": 0, "progress": 0.0, "scannedGb": 0.0}, + "retentionInDays": 0, + "retentionInDaysAsDefault": bool, + "schema": { + "categories": ["str"], + "columns": [ + { + "dataTypeHint": "str", + "description": "str", + "displayName": "str", + "isDefaultDisplay": bool, + "isHidden": bool, + "name": "str", + "type": "str", + } + ], + "description": "str", + "displayName": "str", + "labels": ["str"], + "name": "str", + "solutions": ["str"], + "source": "str", + "standardColumns": [ + { + "dataTypeHint": "str", + "description": "str", + "displayName": "str", + "isDefaultDisplay": bool, + "isHidden": bool, + "name": "str", + "type": "str", + } + ], + "tableSubType": "str", + "tableType": "str", + }, + "searchResults": { + "azureAsyncOperationId": "str", + "description": "str", + "endSearchTime": "2020-02-20 00:00:00", + "limit": 0, + "query": "str", + "sourceTable": "str", + "startSearchTime": "2020-02-20 00:00:00", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "totalRetentionInDays": 0, + "totalRetentionInDaysAsDefault": bool, + "type": "str", + }, + api_version="2022-10-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.tables.get( + resource_group_name=resource_group.name, + workspace_name="str", + table_name="str", + api_version="2022-10-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete(self, resource_group): + response = self.client.tables.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + table_name="str", + api_version="2022-10-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_migrate(self, resource_group): + response = self.client.tables.migrate( + resource_group_name=resource_group.name, + workspace_name="str", + table_name="str", + api_version="2022-10-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_cancel_search(self, resource_group): + response = self.client.tables.cancel_search( + resource_group_name=resource_group.name, + workspace_name="str", + table_name="str", + api_version="2022-10-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_tables_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_tables_operations_async.py new file mode 100644 index 000000000000..11d406880927 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_tables_operations_async.py @@ -0,0 +1,256 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementTablesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_workspace(self, resource_group): + response = self.client.tables.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2022-10-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_or_update(self, resource_group): + response = await ( + await self.client.tables.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + table_name="str", + parameters={ + "archiveRetentionInDays": 0, + "id": "str", + "lastPlanModifiedDate": "str", + "name": "str", + "plan": "str", + "provisioningState": "str", + "restoredLogs": { + "azureAsyncOperationId": "str", + "endRestoreTime": "2020-02-20 00:00:00", + "sourceTable": "str", + "startRestoreTime": "2020-02-20 00:00:00", + }, + "resultStatistics": {"ingestedRecords": 0, "progress": 0.0, "scannedGb": 0.0}, + "retentionInDays": 0, + "retentionInDaysAsDefault": bool, + "schema": { + "categories": ["str"], + "columns": [ + { + "dataTypeHint": "str", + "description": "str", + "displayName": "str", + "isDefaultDisplay": bool, + "isHidden": bool, + "name": "str", + "type": "str", + } + ], + "description": "str", + "displayName": "str", + "labels": ["str"], + "name": "str", + "solutions": ["str"], + "source": "str", + "standardColumns": [ + { + "dataTypeHint": "str", + "description": "str", + "displayName": "str", + "isDefaultDisplay": bool, + "isHidden": bool, + "name": "str", + "type": "str", + } + ], + "tableSubType": "str", + "tableType": "str", + }, + "searchResults": { + "azureAsyncOperationId": "str", + "description": "str", + "endSearchTime": "2020-02-20 00:00:00", + "limit": 0, + "query": "str", + "sourceTable": "str", + "startSearchTime": "2020-02-20 00:00:00", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "totalRetentionInDays": 0, + "totalRetentionInDaysAsDefault": bool, + "type": "str", + }, + api_version="2022-10-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
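The "await (await self.client.tables.begin_create_or_update(...)).result()" shape is the async long-running-operation pattern: the inner await sends the initial request and returns a poller, and awaiting .result() polls until the service reports a terminal state. Split into an intermediate variable it reads like the sketch below, where parameters stands for the same payload dict used in the generated test and the provisioning_state attribute name is assumed from the provisioningState field in that payload:

    poller = await self.client.tables.begin_create_or_update(
        resource_group_name=resource_group.name,
        workspace_name="str",
        table_name="str",
        parameters=parameters,        # same payload as in the generated test body
        api_version="2022-10-01",
    )
    print(poller.status())            # e.g. "InProgress" while the service is still working
    table = await poller.result()     # polls until the operation reaches a terminal state
    assert table.provisioning_state == "Succeeded"  # attribute name assumed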
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_update(self, resource_group): + response = await ( + await self.client.tables.begin_update( + resource_group_name=resource_group.name, + workspace_name="str", + table_name="str", + parameters={ + "archiveRetentionInDays": 0, + "id": "str", + "lastPlanModifiedDate": "str", + "name": "str", + "plan": "str", + "provisioningState": "str", + "restoredLogs": { + "azureAsyncOperationId": "str", + "endRestoreTime": "2020-02-20 00:00:00", + "sourceTable": "str", + "startRestoreTime": "2020-02-20 00:00:00", + }, + "resultStatistics": {"ingestedRecords": 0, "progress": 0.0, "scannedGb": 0.0}, + "retentionInDays": 0, + "retentionInDaysAsDefault": bool, + "schema": { + "categories": ["str"], + "columns": [ + { + "dataTypeHint": "str", + "description": "str", + "displayName": "str", + "isDefaultDisplay": bool, + "isHidden": bool, + "name": "str", + "type": "str", + } + ], + "description": "str", + "displayName": "str", + "labels": ["str"], + "name": "str", + "solutions": ["str"], + "source": "str", + "standardColumns": [ + { + "dataTypeHint": "str", + "description": "str", + "displayName": "str", + "isDefaultDisplay": bool, + "isHidden": bool, + "name": "str", + "type": "str", + } + ], + "tableSubType": "str", + "tableType": "str", + }, + "searchResults": { + "azureAsyncOperationId": "str", + "description": "str", + "endSearchTime": "2020-02-20 00:00:00", + "limit": 0, + "query": "str", + "sourceTable": "str", + "startSearchTime": "2020-02-20 00:00:00", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "totalRetentionInDays": 0, + "totalRetentionInDaysAsDefault": bool, + "type": "str", + }, + api_version="2022-10-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.tables.get( + resource_group_name=resource_group.name, + workspace_name="str", + table_name="str", + api_version="2022-10-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete(self, resource_group): + response = await ( + await self.client.tables.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + table_name="str", + api_version="2022-10-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_migrate(self, resource_group): + response = await self.client.tables.migrate( + resource_group_name=resource_group.name, + workspace_name="str", + table_name="str", + api_version="2022-10-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_cancel_search(self, resource_group): + response = await self.client.tables.cancel_search( + resource_group_name=resource_group.name, + workspace_name="str", + table_name="str", + api_version="2022-10-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_usages_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_usages_operations.py new file mode 100644 index 000000000000..64bc44c3d434 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_usages_operations.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementUsagesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.usages.list( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_usages_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_usages_operations_async.py new file mode 100644 index 000000000000..c1a19dcf2b39 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_usages_operations_async.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementUsagesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.usages.list( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2020-08-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspace_purge_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspace_purge_operations.py new file mode 100644 index 000000000000..ef87dcf94da4 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspace_purge_operations.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementWorkspacePurgeOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_purge(self, resource_group): + response = self.client.workspace_purge.purge( + resource_group_name=resource_group.name, + workspace_name="str", + body={"filters": [{"column": "str", "key": "str", "operator": "str", "value": {}}], "table": "str"}, + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_purge_status(self, resource_group): + response = self.client.workspace_purge.get_purge_status( + resource_group_name=resource_group.name, + workspace_name="str", + purge_id="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... 
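The two purge tests are naturally paired: purge submits the request and the service answers with an operation id, which is what get_purge_status expects as purge_id. A sketch chaining them; the operation_id and status attribute names and the "pending"/"completed" values are assumptions based on the REST API's operationId and status fields, not something this patch verifies:

    purge_response = self.client.workspace_purge.purge(
        resource_group_name=resource_group.name,
        workspace_name="str",
        body={"filters": [{"column": "str", "key": "str", "operator": "str", "value": {}}], "table": "str"},
        api_version="2020-08-01",
    )
    # feed the returned purge id straight into the status call
    status_response = self.client.workspace_purge.get_purge_status(
        resource_group_name=resource_group.name,
        workspace_name="str",
        purge_id=purge_response.operation_id,
        api_version="2020-08-01",
    )
    assert status_response.status in ("pending", "completed")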
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspace_purge_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspace_purge_operations_async.py new file mode 100644 index 000000000000..ce90884ab927 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspace_purge_operations_async.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementWorkspacePurgeOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_purge(self, resource_group): + response = await self.client.workspace_purge.purge( + resource_group_name=resource_group.name, + workspace_name="str", + body={"filters": [{"column": "str", "key": "str", "operator": "str", "value": {}}], "table": "str"}, + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_purge_status(self, resource_group): + response = await self.client.workspace_purge.get_purge_status( + resource_group_name=resource_group.name, + workspace_name="str", + purge_id="str", + api_version="2020-08-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspaces_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspaces_operations.py new file mode 100644 index 000000000000..66fc8f15243d --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspaces_operations.py @@ -0,0 +1,161 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementWorkspacesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.workspaces.list( + api_version="2022-10-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_resource_group(self, resource_group): + response = self.client.workspaces.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2022-10-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_or_update(self, resource_group): + response = self.client.workspaces.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + parameters={ + "location": "str", + "createdDate": "str", + "customerId": "str", + "defaultDataCollectionRuleResourceId": "str", + "etag": "str", + "features": { + "clusterResourceId": "str", + "disableLocalAuth": bool, + "enableDataExport": bool, + "enableLogAccessUsingOnlyResourcePermissions": bool, + "immediatePurgeDataOn30Days": bool, + }, + "forceCmkForQuery": bool, + "id": "str", + "identity": { + "type": "str", + "principalId": "str", + "tenantId": "str", + "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "modifiedDate": "str", + "name": "str", + "privateLinkScopedResources": [{"resourceId": "str", "scopeId": "str"}], + "provisioningState": "str", + "publicNetworkAccessForIngestion": "Enabled", + "publicNetworkAccessForQuery": "Enabled", + "retentionInDays": 0, + "sku": {"name": "str", "capacityReservationLevel": 0, "lastSkuUpdate": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + "workspaceCapping": {"dailyQuotaGb": 0.0, "dataIngestionStatus": "str", "quotaNextResetTime": "str"}, + }, + api_version="2022-10-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete(self, resource_group): + response = self.client.workspaces.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2022-10-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
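As in the tables tests, the trailing .result() on begin_create_or_update and begin_delete blocks until the long-running operation finishes. Keeping the poller in a variable makes the intermediate states visible; a sketch with the same placeholder arguments:

    poller = self.client.workspaces.begin_delete(
        resource_group_name=resource_group.name,
        workspace_name="str",
        api_version="2022-10-01",
    )
    # the poller can be inspected before blocking on it
    print(poller.status())
    poller.wait()         # block until the service reports a terminal state
    assert poller.done()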
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.workspaces.get( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2022-10-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_update(self, resource_group): + response = self.client.workspaces.update( + resource_group_name=resource_group.name, + workspace_name="str", + parameters={ + "createdDate": "str", + "customerId": "str", + "defaultDataCollectionRuleResourceId": "str", + "etag": "str", + "features": { + "clusterResourceId": "str", + "disableLocalAuth": bool, + "enableDataExport": bool, + "enableLogAccessUsingOnlyResourcePermissions": bool, + "immediatePurgeDataOn30Days": bool, + }, + "forceCmkForQuery": bool, + "id": "str", + "identity": { + "type": "str", + "principalId": "str", + "tenantId": "str", + "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "modifiedDate": "str", + "name": "str", + "privateLinkScopedResources": [{"resourceId": "str", "scopeId": "str"}], + "provisioningState": "str", + "publicNetworkAccessForIngestion": "Enabled", + "publicNetworkAccessForQuery": "Enabled", + "retentionInDays": 0, + "sku": {"name": "str", "capacityReservationLevel": 0, "lastSkuUpdate": "str"}, + "tags": {"str": "str"}, + "type": "str", + "workspaceCapping": {"dailyQuotaGb": 0.0, "dataIngestionStatus": "str", "quotaNextResetTime": "str"}, + }, + api_version="2022-10-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspaces_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspaces_operations_async.py new file mode 100644 index 000000000000..bbb4da741d11 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspaces_operations_async.py @@ -0,0 +1,170 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementWorkspacesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.workspaces.list( + api_version="2022-10-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_resource_group(self, resource_group): + response = self.client.workspaces.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2022-10-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_or_update(self, resource_group): + response = await ( + await self.client.workspaces.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + parameters={ + "location": "str", + "createdDate": "str", + "customerId": "str", + "defaultDataCollectionRuleResourceId": "str", + "etag": "str", + "features": { + "clusterResourceId": "str", + "disableLocalAuth": bool, + "enableDataExport": bool, + "enableLogAccessUsingOnlyResourcePermissions": bool, + "immediatePurgeDataOn30Days": bool, + }, + "forceCmkForQuery": bool, + "id": "str", + "identity": { + "type": "str", + "principalId": "str", + "tenantId": "str", + "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "modifiedDate": "str", + "name": "str", + "privateLinkScopedResources": [{"resourceId": "str", "scopeId": "str"}], + "provisioningState": "str", + "publicNetworkAccessForIngestion": "Enabled", + "publicNetworkAccessForQuery": "Enabled", + "retentionInDays": 0, + "sku": {"name": "str", "capacityReservationLevel": 0, "lastSkuUpdate": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + "workspaceCapping": { + "dailyQuotaGb": 0.0, + "dataIngestionStatus": "str", + "quotaNextResetTime": "str", + }, + }, + api_version="2022-10-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete(self, resource_group): + response = await ( + await self.client.workspaces.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2022-10-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.workspaces.get( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2022-10-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_update(self, resource_group): + response = await self.client.workspaces.update( + resource_group_name=resource_group.name, + workspace_name="str", + parameters={ + "createdDate": "str", + "customerId": "str", + "defaultDataCollectionRuleResourceId": "str", + "etag": "str", + "features": { + "clusterResourceId": "str", + "disableLocalAuth": bool, + "enableDataExport": bool, + "enableLogAccessUsingOnlyResourcePermissions": bool, + "immediatePurgeDataOn30Days": bool, + }, + "forceCmkForQuery": bool, + "id": "str", + "identity": { + "type": "str", + "principalId": "str", + "tenantId": "str", + "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "modifiedDate": "str", + "name": "str", + "privateLinkScopedResources": [{"resourceId": "str", "scopeId": "str"}], + "provisioningState": "str", + "publicNetworkAccessForIngestion": "Enabled", + "publicNetworkAccessForQuery": "Enabled", + "retentionInDays": 0, + "sku": {"name": "str", "capacityReservationLevel": 0, "lastSkuUpdate": "str"}, + "tags": {"str": "str"}, + "type": "str", + "workspaceCapping": {"dailyQuotaGb": 0.0, "dataIngestionStatus": "str", "quotaNextResetTime": "str"}, + }, + api_version="2022-10-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/sdk_packaging.toml b/sdk/loganalytics/azure-mgmt-loganalytics/sdk_packaging.toml index 71f68a323657..6da671ea6af5 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/sdk_packaging.toml +++ b/sdk/loganalytics/azure-mgmt-loganalytics/sdk_packaging.toml @@ -2,6 +2,6 @@ package_name = "azure-mgmt-loganalytics" package_pprint_name = "Log Analytics Management" package_doc_id = "" -is_stable = false +is_stable = true sample_link = "" title = "LogAnalyticsManagementClient" diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/setup.py b/sdk/loganalytics/azure-mgmt-loganalytics/setup.py index f82247ba729b..a670f6068bab 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/setup.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/setup.py @@ -1,10 +1,10 @@ #!/usr/bin/env python -#------------------------------------------------------------------------- +# ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. 
-#--------------------------------------------------------------------------
+# --------------------------------------------------------------------------
 import re
 import os.path

@@ -16,64 +16,70 @@ PACKAGE_PPRINT_NAME = "Log Analytics Management"

 # a-b-c => a/b/c
-package_folder_path = PACKAGE_NAME.replace('-', '/')
+package_folder_path = PACKAGE_NAME.replace("-", "/")
 # a-b-c => a.b.c
-namespace_name = PACKAGE_NAME.replace('-', '.')
+namespace_name = PACKAGE_NAME.replace("-", ".")

 # Version extraction inspired from 'requests'
-with open(os.path.join(package_folder_path, 'version.py')
-          if os.path.exists(os.path.join(package_folder_path, 'version.py'))
-          else os.path.join(package_folder_path, '_version.py'), 'r') as fd:
-    version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
-                        fd.read(), re.MULTILINE).group(1)
+with open(
+    (
+        os.path.join(package_folder_path, "version.py")
+        if os.path.exists(os.path.join(package_folder_path, "version.py"))
+        else os.path.join(package_folder_path, "_version.py")
+    ),
+    "r",
+) as fd:
+    version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1)

 if not version:
-    raise RuntimeError('Cannot find version information')
+    raise RuntimeError("Cannot find version information")

-with open('README.md', encoding='utf-8') as f:
+with open("README.md", encoding="utf-8") as f:
     readme = f.read()
-with open('CHANGELOG.md', encoding='utf-8') as f:
+with open("CHANGELOG.md", encoding="utf-8") as f:
     changelog = f.read()

 setup(
     name=PACKAGE_NAME,
     version=version,
-    description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
-    long_description=readme + '\n\n' + changelog,
-    long_description_content_type='text/markdown',
-    license='MIT License',
-    author='Microsoft Corporation',
-    author_email='azpysdkhelp@microsoft.com',
-    url='https://github.com/Azure/azure-sdk-for-python',
+    description="Microsoft Azure {} Client Library for Python".format(PACKAGE_PPRINT_NAME),
+    long_description=readme + "\n\n" + changelog,
+    long_description_content_type="text/markdown",
+    license="MIT License",
+    author="Microsoft Corporation",
+    author_email="azpysdkhelp@microsoft.com",
+    url="https://github.com/Azure/azure-sdk-for-python",
     keywords="azure, azure sdk",  # update with search keywords relevant to the azure service / product
     classifiers=[
-        'Development Status :: 4 - Beta',
-        'Programming Language :: Python',
-        'Programming Language :: Python :: 3 :: Only',
-        'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.7',
-        'Programming Language :: Python :: 3.8',
-        'Programming Language :: Python :: 3.9',
-        'Programming Language :: Python :: 3.10',
-        'Programming Language :: Python :: 3.11',
-        'License :: OSI Approved :: MIT License',
+        "Development Status :: 4 - Beta",
+        "Programming Language :: Python",
+        "Programming Language :: Python :: 3 :: Only",
+        "Programming Language :: Python :: 3",
+        "Programming Language :: Python :: 3.8",
+        "Programming Language :: Python :: 3.9",
+        "Programming Language :: Python :: 3.10",
+        "Programming Language :: Python :: 3.11",
+        "Programming Language :: Python :: 3.12",
+        "License :: OSI Approved :: MIT License",
     ],
     zip_safe=False,
-    packages=find_packages(exclude=[
-        'tests',
-        # Exclude packages that will be covered by PEP420 or nspkg
-        'azure',
-        'azure.mgmt',
-    ]),
+    packages=find_packages(
+        exclude=[
+            "tests",
+            # Exclude packages that will be covered by PEP420 or nspkg
+            "azure",
+            "azure.mgmt",
+        ]
+    ),
     include_package_data=True,
     package_data={
-        'pytyped': ['py.typed'],
+        "pytyped": ["py.typed"],
     },
     install_requires=[
-        "msrest>=0.7.1",
-        "azure-common~=1.1",
-        "azure-mgmt-core>=1.3.2,<2.0.0",
-        "typing-extensions>=4.3.0; python_version<'3.8.0'",
+        "isodate>=0.6.1",
+        "typing-extensions>=4.6.0",
+        "azure-common>=1.1",
+        "azure-mgmt-core>=1.3.2",
    ],
-    python_requires=">=3.7"
+    python_requires=">=3.8",
 )
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/tests/conftest.py b/sdk/loganalytics/azure-mgmt-loganalytics/tests/conftest.py
index e8be084e4e0f..1fbe0ba3e8b2 100644
--- a/sdk/loganalytics/azure-mgmt-loganalytics/tests/conftest.py
+++ b/sdk/loganalytics/azure-mgmt-loganalytics/tests/conftest.py
@@ -1,54 +1,41 @@
+# coding=utf-8
 # --------------------------------------------------------------------------
-#
 # Copyright (c) Microsoft Corporation. All rights reserved.
-#
-# The MIT License (MIT)
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the ""Software""), to
-# deal in the Software without restriction, including without limitation the
-# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
-# sell copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in
-# all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-# IN THE SOFTWARE.
-#
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
 # --------------------------------------------------------------------------
 import os
-import platform
 import pytest
-import sys
-
 from dotenv import load_dotenv
-
-from devtools_testutils import test_proxy, add_general_regex_sanitizer, add_header_regex_sanitizer, add_body_key_sanitizer
-
-# Ignore async tests for Python < 3.5
-collect_ignore_glob = []
-if sys.version_info < (3, 5) or platform.python_implementation() == "PyPy":
-    collect_ignore_glob.append("*_async.py")
+from devtools_testutils import (
+    test_proxy,
+    add_general_regex_sanitizer,
+    add_body_key_sanitizer,
+    add_header_regex_sanitizer,
+)

 load_dotenv()
+
+
+# avoid recording sensitive identity information in recordings
 @pytest.fixture(scope="session", autouse=True)
 def add_sanitizers(test_proxy):
-    subscription_id = os.environ.get("AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000")
-    tenant_id = os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000")
-    client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000")
-    client_secret = os.environ.get("AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000")
-    add_general_regex_sanitizer(regex=subscription_id, value="00000000-0000-0000-0000-000000000000")
-    add_general_regex_sanitizer(regex=tenant_id, value="00000000-0000-0000-0000-000000000000")
-    add_general_regex_sanitizer(regex=client_id, value="00000000-0000-0000-0000-000000000000")
-    add_general_regex_sanitizer(regex=client_secret, value="00000000-0000-0000-0000-000000000000")
+    loganalyticsmanagement_subscription_id = os.environ.get(
+        "AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000"
+    )
+    loganalyticsmanagement_tenant_id = os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000")
+    loganalyticsmanagement_client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000")
+    loganalyticsmanagement_client_secret = os.environ.get("AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(
+        regex=loganalyticsmanagement_subscription_id, value="00000000-0000-0000-0000-000000000000"
+    )
+    add_general_regex_sanitizer(regex=loganalyticsmanagement_tenant_id, value="00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(regex=loganalyticsmanagement_client_id, value="00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(
+        regex=loganalyticsmanagement_client_secret, value="00000000-0000-0000-0000-000000000000"
+    )
+
     add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]")
     add_header_regex_sanitizer(key="Cookie", value="cookie;")
-    add_body_key_sanitizer(json_path="$..access_token", value="access_token")
\ No newline at end of file
+    add_body_key_sanitizer(json_path="$..access_token", value="access_token")
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_deleted_workspaces_operations_async_test.py b/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_deleted_workspaces_operations_async_test.py
new file mode 100644
index 000000000000..028495c6563f
--- /dev/null
+++ b/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_deleted_workspaces_operations_async_test.py
@@ -0,0 +1,36 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.live_test_only
+class TestLogAnalyticsManagementDeletedWorkspacesOperationsAsync(AzureMgmtRecordedTestCase):
+    def setup_method(self, method):
+        self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True)
+
+    @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+    @recorded_by_proxy_async
+    async def test_list(self, resource_group):
+        response = self.client.deleted_workspaces.list()
+        result = [r async for r in response]
+        assert response
+
+    @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+    @recorded_by_proxy_async
+    async def test_list_by_resource_group(self, resource_group):
+        response = self.client.deleted_workspaces.list_by_resource_group(
+            resource_group_name=resource_group.name,
+        )
+        result = [r async for r in response]
+        assert result == []
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_deleted_workspaces_operations_test.py b/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_deleted_workspaces_operations_test.py
new file mode 100644
index 000000000000..9ba49573c418
--- /dev/null
+++ b/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_deleted_workspaces_operations_test.py
@@ -0,0 +1,35 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.loganalytics import LogAnalyticsManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.live_test_only
+class TestLogAnalyticsManagementDeletedWorkspacesOperations(AzureMgmtRecordedTestCase):
+    def setup_method(self, method):
+        self.client = self.create_mgmt_client(LogAnalyticsManagementClient)
+
+    @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+    @recorded_by_proxy
+    def test_list(self, resource_group):
+        response = self.client.deleted_workspaces.list()
+        result = [r for r in response]
+        assert response
+
+    @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+    @recorded_by_proxy
+    def test_list_by_resource_group(self, resource_group):
+        response = self.client.deleted_workspaces.list_by_resource_group(
+            resource_group_name=resource_group.name,
+        )
+        result = [r for r in response]
+        assert result == []
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_operations_async_test.py b/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_operations_async_test.py
new file mode 100644
index 000000000000..48244f71afb1
--- /dev/null
+++ b/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_operations_async_test.py
@@ -0,0 +1,27 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.live_test_only
+class TestLogAnalyticsManagementOperationsAsync(AzureMgmtRecordedTestCase):
+    def setup_method(self, method):
+        self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True)
+
+    @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+    @recorded_by_proxy_async
+    async def test_list(self, resource_group):
+        response = self.client.operations.list()
+        result = [r async for r in response]
+        assert result
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_operations_test.py b/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_operations_test.py
new file mode 100644
index 000000000000..f8b3d6a0c6ef
--- /dev/null
+++ b/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_operations_test.py
@@ -0,0 +1,26 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.loganalytics import LogAnalyticsManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.live_test_only
+class TestLogAnalyticsManagementOperations(AzureMgmtRecordedTestCase):
+    def setup_method(self, method):
+        self.client = self.create_mgmt_client(LogAnalyticsManagementClient)
+
+    @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+    @recorded_by_proxy
+    def test_list(self, resource_group):
+        response = self.client.operations.list()
+        result = [r for r in response]
+        assert result
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_workspaces_operations_async_test.py b/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_workspaces_operations_async_test.py
new file mode 100644
index 000000000000..f70e47b3897c
--- /dev/null
+++ b/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_workspaces_operations_async_test.py
@@ -0,0 +1,36 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.live_test_only
+class TestLogAnalyticsManagementWorkspacesOperationsAsync(AzureMgmtRecordedTestCase):
+    def setup_method(self, method):
+        self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True)
+
+    @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+    @recorded_by_proxy_async
+    async def test_list(self, resource_group):
+        response = self.client.workspaces.list()
+        result = [r async for r in response]
+        assert response
+
+    @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+    @recorded_by_proxy_async
+    async def test_list_by_resource_group(self, resource_group):
+        response = self.client.workspaces.list_by_resource_group(
+            resource_group_name=resource_group.name,
+        )
+        result = [r async for r in response]
+        assert result == []
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_workspaces_operations_test.py b/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_workspaces_operations_test.py
new file mode 100644
index 000000000000..7ce34c4f59ff
--- /dev/null
+++ b/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_log_analytics_management_workspaces_operations_test.py
@@ -0,0 +1,35 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.loganalytics import LogAnalyticsManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.live_test_only
+class TestLogAnalyticsManagementWorkspacesOperations(AzureMgmtRecordedTestCase):
+    def setup_method(self, method):
+        self.client = self.create_mgmt_client(LogAnalyticsManagementClient)
+
+    @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+    @recorded_by_proxy
+    def test_list(self, resource_group):
+        response = self.client.workspaces.list()
+        result = [r for r in response]
+        assert response
+
+    @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+    @recorded_by_proxy
+    def test_list_by_resource_group(self, resource_group):
+        response = self.client.workspaces.list_by_resource_group(
+            resource_group_name=resource_group.name,
+        )
+        result = [r for r in response]
+        assert result == []
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_mgmt_loganalytics.py b/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_mgmt_loganalytics.py
deleted file mode 100644
index 532689623397..000000000000
--- a/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_mgmt_loganalytics.py
+++ /dev/null
@@ -1,16 +0,0 @@
-import pytest
-import azure.mgmt.loganalytics
-from devtools_testutils import AzureMgmtRecordedTestCase, recorded_by_proxy
-
-class TestMgmtLogAnalytics(AzureMgmtRecordedTestCase):
-
-    def setup_method(self, method):
-        self.client = self.create_mgmt_client(
-            azure.mgmt.loganalytics.LogAnalyticsManagementClient
-        )
-
-    # @pytest.mark.skip('Hard to test')
-    @recorded_by_proxy
-    def test_loganalytics_operations(self):
-        operations = self.client.operations.list()
-        assert len(list(operations)) > 0
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_workspace.py b/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_workspace_test.py
similarity index 50%
rename from sdk/loganalytics/azure-mgmt-loganalytics/tests/test_workspace.py
rename to sdk/loganalytics/azure-mgmt-loganalytics/tests/test_workspace_test.py
index 0fa998bbefe3..1c3643f1f4be 100644
--- a/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_workspace.py
+++ b/sdk/loganalytics/azure-mgmt-loganalytics/tests/test_workspace_test.py
@@ -2,27 +2,20 @@
 import azure.mgmt.loganalytics
 from devtools_testutils import AzureMgmtRecordedTestCase, recorded_by_proxy, ResourceGroupPreparer

+
+@pytest.mark.live_test_only
 class TestMgmtLogAnalyticsWorkspace(AzureMgmtRecordedTestCase):

     def setup_method(self, method):
-        self.client = self.create_mgmt_client(
-            azure.mgmt.loganalytics.LogAnalyticsManagementClient
-        )
+        self.client = self.create_mgmt_client(azure.mgmt.loganalytics.LogAnalyticsManagementClient)

     @ResourceGroupPreparer()
     @recorded_by_proxy
     def test_loganalytics_workspace(self, resource_group, location):
-        workspace_name = 'WorkspaceName'
+        workspace_name = "WorkspaceName"
         workspace_result = self.client.workspaces.begin_create_or_update(
-            resource_group.name,
-            workspace_name,
-            {
-                'location': location
-            }
+            resource_group.name, workspace_name, {"location": location}
         ).result()

-        workspace = self.client.workspaces.get(
-            resource_group.name,
-            workspace_name
-        )
-        assert workspace_result.name == workspace.name
\ No newline at end of file
+        workspace = self.client.workspaces.get(resource_group.name, workspace_name)
+        assert workspace_result.name == workspace.name